Compare commits: 0.10.0-rc1...int-test-p (31 commits)

SHA1
0fdaf9fb15
1574aa0631
1723025fbf
a2b678caf6
0a371ec360
e58a716fe1
d09a19a385
cee808ff06
4326d1d259
b976872f77
7b6ea76437
9069758969
15d6b1a2a5
8a7fe4ca07
64ad60663f
cb3f46b46e
41e514ae9a
086b4828ff
ed263854d4
eb6fe4ba6e
993172d31b
c70b6e72a7
22e4dd7fca
b6009057a8
b978f04910
3ac29d54d9
877c17fab5
f01fd26ce3
273c165a41
c88fc66c99
9b271a6963
.drone.yml (35 changed lines)

@@ -3,12 +3,12 @@ kind: pipeline
name: coopcloud.tech/abra
steps:
- name: make check
image: golang:1.22
image: golang:1.24
commands:
- make check

- name: make test
image: golang:1.22
image: golang:1.24
environment:
CATL_URL: https://git.coopcloud.tech/toolshed/recipes-catalogue-json.git
commands:
@@ -60,7 +60,31 @@ steps:
- make check
- make test

- name: integration test
- name: on-demand integration test
image: appleboy/drone-ssh
settings:
host:
- int.coopcloud.tech
username: abra
key:
from_secret: abra_int_private_key
port: 22
command_timeout: 60m
script_stop: true
request_pty: true
script:
- |
wget https://git.coopcloud.tech/toolshed/abra/raw/branch/main/scripts/tests/run-ci-int -O run-ci-int
chmod +x run-ci-int
sh run-ci-int
when:
ref:
- int-*
depends_on:
- make check
- make test

- name: nightly integration test
image: appleboy/drone-ssh
settings:
host:
@@ -87,3 +111,8 @@ steps:
volumes:
- name: deps
temp: {}

trigger:
action:
exclude:
- synchronized
@@ -4,6 +4,7 @@
> please do add yourself! This is a community project, let's show some 💞

- 3wordchant
- ammaratef45
- cassowary
- codegod100
- decentral1se
@@ -17,3 +18,5 @@
- roxxers
- vera
- yksflip
- basebuilder
- mayel
@@ -1,5 +1,5 @@
# Build image
FROM golang:1.22-alpine AS build
FROM golang:1.24-alpine AS build

ENV GOPRIVATE=coopcloud.tech
Makefile (2 changed lines)

@@ -2,7 +2,7 @@ ABRA := ./cmd/abra
KADABRA := ./cmd/kadabra
COMMIT := $(shell git rev-list -1 HEAD)
GOPATH := $(shell go env GOPATH)
GOVERSION := 1.22
GOVERSION := 1.24
LDFLAGS := "-X 'main.Commit=$(COMMIT)'"
DIST_LDFLAGS := $(LDFLAGS)" -s -w"
GCFLAGS := "all=-l -B"
@@ -261,7 +261,7 @@ func init() {
AppCmdCommand.Flags().BoolVarP(
&requestTTY,
"tty",
"t",
"T",
false,
"request remote TTY",
)
@@ -18,7 +18,7 @@ import (
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
dockerClient "github.com/docker/docker/client"
"github.com/docker/docker/errdefs"
"github.com/docker/docker/pkg/archive"
@@ -134,7 +134,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
if err != nil {
return err
}
if _, err := container.RunExec(dcli, cl, containerID, &types.ExecConfig{
if _, err := container.RunExec(dcli, cl, containerID, &containertypes.ExecOptions{
AttachStderr: true,
AttachStdin: true,
AttachStdout: true,
@@ -162,7 +162,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
}

log.Debugf("copy %s from local to %s on container", srcPath, dstPath)
copyOpts := types.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
copyOpts := containertypes.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
if err := cl.CopyToContainer(context.Background(), containerID, dstPath, content, copyOpts); err != nil {
return err
}
@@ -173,7 +173,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
if err != nil {
return err
}
if _, err := container.RunExec(dcli, cl, containerID, &types.ExecConfig{
if _, err := container.RunExec(dcli, cl, containerID, &containertypes.ExecOptions{
AttachStderr: true,
AttachStdin: true,
AttachStdout: true,
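Note: the hunks above, and the matching changes further down in the run, backup and remote-command files, all follow the same docker/docker v28 rename: the exec and copy option structs move from the api/types package into api/types/container. A minimal, hedged sketch of the new call shape (the Attach fields are the ones set at the call sites in this diff; Cmd is added here only for illustration):

```go
// Sketch only: the struct rename applied throughout this diff.
// Only the package and type names change; the fields keep their old meaning.
package example

import (
	containertypes "github.com/docker/docker/api/types/container"
)

// newExecOpts builds the options that were previously expressed as
// types.ExecConfig.
func newExecOpts(cmd []string) containertypes.ExecOptions {
	return containertypes.ExecOptions{
		AttachStderr: true,
		AttachStdin:  true,
		AttachStdout: true,
		Cmd:          cmd, // assumption for illustration
	}
}
```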
@@ -3,6 +3,7 @@ package app
import (
"context"
"fmt"
"strings"

"coopcloud.tech/abra/cli/internal"
"coopcloud.tech/abra/pkg/app"
@@ -46,7 +47,8 @@ checkout as-is. Recipe commit hashes are also supported as values for
ValidArgsFunction: func(
cmd *cobra.Command,
args []string,
toComplete string) ([]string, cobra.ShellCompDirective) {
toComplete string,
) ([]string, cobra.ShellCompDirective) {
switch l := len(args); l {
case 0:
return autocomplete.AppNameComplete()
@@ -63,10 +65,8 @@ checkout as-is. Recipe commit hashes are also supported as values for
},
Run: func(cmd *cobra.Command, args []string) {
var (
deployWarnMessages []string
toDeployVersion string
isChaosCommit bool
toDeployChaosVersion = config.CHAOS_DEFAULT
deployWarnMessages []string
toDeployVersion string
)

app := internal.ValidateApp(args)
@@ -79,10 +79,6 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatal(err)
}

if err := lint.LintForErrors(app.Recipe); err != nil {
log.Fatal(err)
}

cl, err := client.New(app.Server)
if err != nil {
log.Fatal(err)
@@ -99,46 +95,20 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatalf("%s is already deployed", app.Name)
}

if len(args) == 2 && args[1] != "" {
toDeployVersion = args[1]
}

if !deployMeta.IsDeployed &&
toDeployVersion == "" &&
app.Recipe.EnvVersion != "" && !internal.IgnoreEnvVersion {
log.Debugf("new deployment, choosing .env version: %s", app.Recipe.EnvVersion)
toDeployVersion = app.Recipe.EnvVersion
}

if !internal.Chaos && toDeployVersion == "" {
if err := getLatestVersionOrCommit(app, &toDeployVersion); err != nil {
log.Fatal(err)
}
}

if internal.Chaos {
if err := getChaosVersion(app, &toDeployVersion, &toDeployChaosVersion); err != nil {
log.Fatal(err)
}
toDeployVersion, err = getDeployVersion(args, deployMeta, app)
if err != nil {
log.Fatal(fmt.Errorf("get deploy version: %s", err))
}

if !internal.Chaos {
isChaosCommit, err = app.Recipe.EnsureVersion(toDeployVersion)
_, err = app.Recipe.EnsureVersion(toDeployVersion)
if err != nil {
log.Fatal(err)
log.Fatalf("ensure recipe: %s", err)
}
}

if isChaosCommit {
log.Debugf("assuming chaos commit: %s", toDeployVersion)

internal.Chaos = true
toDeployChaosVersion = toDeployVersion

toDeployVersion, err = app.Recipe.GetVersionLabelLocal()
if err != nil {
log.Fatal(err)
}
}
if err := lint.LintForErrors(app.Recipe); err != nil {
log.Fatal(err)
}

if err := validateSecrets(cl, app); err != nil {
@@ -171,16 +141,14 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatal(err)
}

toDeployChaosVersionLabel := toDeployChaosVersion
if app.Recipe.Dirty {
toDeployChaosVersionLabel = formatter.AddDirtyMarker(toDeployChaosVersionLabel)
}

appPkg.ExposeAllEnv(stackName, compose, app.Env)
appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
appPkg.SetChaosVersionLabel(compose, stackName, toDeployChaosVersionLabel)
if internal.Chaos {
appPkg.SetChaosVersionLabel(compose, stackName, toDeployVersion)
}
appPkg.SetUpdateLabel(compose, stackName, app.Env)
appPkg.SetVersionLabel(compose, stackName, toDeployVersion)

envVars, err := appPkg.CheckEnv(app)
if err != nil {
@@ -212,19 +180,12 @@ checkout as-is. Recipe commit hashes are also supported as values for
deployedVersion = deployMeta.Version
}

toWriteVersion := toDeployVersion
if internal.Chaos || isChaosCommit {
toWriteVersion = toDeployChaosVersion
}

if err := internal.DeployOverview(
app,
deployWarnMessages,
deployedVersion,
deployMeta.ChaosVersion,
toDeployVersion,
toDeployChaosVersion,
toWriteVersion,
"",
deployWarnMessages,
); err != nil {
log.Fatal(err)
}
@@ -248,53 +209,28 @@ checkout as-is. Recipe commit hashes are also supported as values for
}
}

if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil {
if err := app.WriteRecipeVersion(toDeployVersion, false); err != nil {
log.Fatalf("writing recipe version failed: %s", err)
}
},
}

func getChaosVersion(app app.App, toDeployVersion, toDeployChaosVersion *string) error {
var err error
*toDeployChaosVersion, err = app.Recipe.ChaosVersion()
if err != nil {
return err
}

*toDeployVersion, err = app.Recipe.GetVersionLabelLocal()
if err != nil {
return err
}

return nil
}

func getLatestVersionOrCommit(app app.App, toDeployVersion *string) error {
func getLatestVersionOrCommit(app app.App) (string, error) {
versions, err := app.Recipe.Tags()
if err != nil {
return err
return "", err
}

if len(versions) > 0 && !internal.Chaos {
*toDeployVersion = versions[len(versions)-1]

log.Debugf("choosing %s as version to deploy", *toDeployVersion)

if _, err := app.Recipe.EnsureVersion(*toDeployVersion); err != nil {
return err
}

return nil
return versions[len(versions)-1], nil
}

head, err := app.Recipe.Head()
if err != nil {
return err
return "", err
}

*toDeployVersion = formatter.SmallSHA(head.String())

return nil
return formatter.SmallSHA(head.String()), nil
}

// validateArgsAndFlags ensures compatible args/flags.
@@ -321,6 +257,46 @@ func validateSecrets(cl *dockerClient.Client, app app.App) error {
return nil
}

func getDeployVersion(cliArgs []string, deployMeta stack.DeployMeta, app app.App) (string, error) {
// Chaos mode overrides everything
if internal.Chaos {
v, err := app.Recipe.ChaosVersion()
if err != nil {
return "", err
}
log.Debugf("version: taking chaos version: %s", v)
return v, nil
}

// Check if the deploy version is set with a cli argument
if len(cliArgs) == 2 && cliArgs[1] != "" {
log.Debugf("version: taking version from cli arg: %s", cliArgs[1])
return cliArgs[1], nil
}

// Check if the recipe has a version in the .env file
if app.Recipe.EnvVersion != "" && !internal.IgnoreEnvVersion {
if strings.HasSuffix(app.Recipe.EnvVersionRaw, "+U") {
return "", fmt.Errorf("version: can not redeploy chaos version %s", app.Recipe.EnvVersionRaw)
}
log.Debugf("version: taking version from .env file: %s", app.Recipe.EnvVersion)
return app.Recipe.EnvVersion, nil
}

// Take deployed version
if deployMeta.IsDeployed {
log.Debugf("version: taking deployed version: %s", deployMeta.Version)
return deployMeta.Version, nil
}

v, err := getLatestVersionOrCommit(app)
log.Debugf("version: taking new recipe version: %s", v)
if err != nil {
return "", err
}
return v, nil
}

func init() {
AppDeployCommand.Flags().BoolVarP(
&internal.Chaos,
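Note: the net effect of the refactor above is that all version selection now goes through getDeployVersion, which resolves the version to deploy in a fixed precedence order. A simplified, hedged sketch of that order, using plain strings instead of the real app/recipe/deploy-metadata types:

```go
// Simplified, illustrative only; mirrors the order of the checks in
// getDeployVersion above, not its real signature or types.
package example

import "errors"

func resolveDeployVersion(chaos bool, chaosVersion, cliArg, envVersion, deployedVersion, latest string) (string, error) {
	switch {
	case chaos: // --chaos overrides everything
		return chaosVersion, nil
	case cliArg != "": // explicit [version] argument
		return cliArg, nil
	case envVersion != "": // version pinned in the app .env file
		return envVersion, nil
	case deployedVersion != "": // app already deployed: keep that version
		return deployedVersion, nil
	case latest != "": // otherwise latest recipe tag, or the HEAD commit
		return latest, nil
	}
	return "", errors.New("no deploy version could be resolved")
}
```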
@@ -75,43 +75,41 @@ var AppNewCommand = &cobra.Command{

chaosVersion := config.CHAOS_DEFAULT
if internal.Chaos {
recipeVersion = chaosVersion

if !internal.Offline {
if err := recipe.EnsureUpToDate(); err != nil {
log.Fatal(err)
}
}
}

if !internal.Chaos {
if err := recipe.EnsureIsClean(); err != nil {
log.Fatal(err)
}
}

var recipeVersions recipePkg.RecipeVersions
if recipeVersion == "" {
var err error
recipeVersions, _, err = recipe.GetRecipeVersions()
chaosVersion, err = recipe.ChaosVersion()
if err != nil {
log.Fatal(err)
}
}

if len(recipeVersions) > 0 {
latest := recipeVersions[len(recipeVersions)-1]
for tag := range latest {
recipeVersion = tag
}

if _, err := recipe.EnsureVersion(recipeVersion); err != nil {
log.Fatal(err)
}
recipeVersion = chaosVersion
} else {
if err := recipe.EnsureLatest(); err != nil {
if err := recipe.EnsureIsClean(); err != nil {
log.Fatal(err)
}

var recipeVersions recipePkg.RecipeVersions
if recipeVersion == "" {
var err error
recipeVersions, _, err = recipe.GetRecipeVersions()
if err != nil {
log.Fatal(err)
}
}

if len(recipeVersions) > 0 {
latest := recipeVersions[len(recipeVersions)-1]
for tag := range latest {
recipeVersion = tag
}

if _, err := recipe.EnsureVersion(recipeVersion); err != nil {
log.Fatal(err)
}
} else {
if err := recipe.EnsureLatest(); err != nil {
log.Fatal(err)
}
}
}

if err := ensureServerFlag(); err != nil {
@@ -143,10 +143,10 @@ func showPSOutput(app appPkg.App, cl *dockerClient.Client, deployedVersion, chao

row := []string{
containerStats["service"],
containerStats["status"],
containerStats["image"],
dVersion,
cVersion,
containerStats["status"],
}

rows = append(rows, row)
@@ -170,10 +170,10 @@ func showPSOutput(app appPkg.App, cl *dockerClient.Client, deployedVersion, chao

headers := []string{
"SERVICE",
"STATUS",
"IMAGE",
"VERSION",
"CHAOS",
"STATUS",
}

table.
@@ -123,6 +123,13 @@ Pass "--all-services/-a" to restart all services.`,
var allServices bool

func init() {
AppRestartCommand.Flags().BoolVarP(
&internal.Chaos,
"chaos",
"C",
false,
"ignore uncommitted recipes changes",
)
AppRestartCommand.Flags().BoolVarP(
&allServices,
"all-services",
@@ -178,23 +178,18 @@ beforehand. See "abra app backup" for more.`,
appPkg.ExposeAllEnv(stackName, compose, app.Env)
appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
appPkg.SetChaosVersionLabel(compose, stackName, chosenDowngrade)
if internal.Chaos {
appPkg.SetChaosVersionLabel(compose, stackName, chosenDowngrade)
}
appPkg.SetUpdateLabel(compose, stackName, app.Env)

chaosVersion := config.CHAOS_DEFAULT
if deployMeta.IsChaos {
chaosVersion = deployMeta.ChaosVersion
}

// NOTE(d1): no release notes implemeneted for rolling back
if err := internal.NewVersionOverview(
if err := internal.DeployOverview(
app,
downgradeWarnMessages,
"rollback",
deployMeta.Version,
chaosVersion,
chosenDowngrade,
"",
downgradeWarnMessages,
); err != nil {
log.Fatal(err)
}
@@ -247,7 +242,7 @@ func validateDowngradeVersionArg(
) error {
parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version)
if err != nil {
return fmt.Errorf("'%s' is not a known version for %s", deployMeta.Version, app.Recipe.Name)
return fmt.Errorf("current deployment '%s' is not a known version for %s", deployMeta.Version, app.Recipe.Name)
}

parsedSpecificVersion, err := tagcmp.Parse(specificVersion)
@@ -11,7 +11,7 @@ import (
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters"
"github.com/spf13/cobra"
)
@@ -64,7 +64,7 @@ var AppRunCommand = &cobra.Command{
}

userCmd := args[2:]
execCreateOpts := types.ExecConfig{
execCreateOpts := containertypes.ExecOptions{
AttachStderr: true,
AttachStdin: true,
AttachStdout: true,
@@ -49,11 +49,11 @@ var AppSecretGenerateCommand = &cobra.Command{
log.Fatal(err)
}

if len(args) == 1 && !generateAllSecrets {
if len(args) <= 2 && !generateAllSecrets {
log.Fatal("missing arguments [secret]/[version] or '--all'")
}

if len(args) > 1 && generateAllSecrets {
if len(args) > 2 && generateAllSecrets {
log.Fatal("cannot use '[secret] [version]' and '--all' together")
}
@@ -54,21 +54,12 @@ Passing "--prune/-p" does not remove those volumes.`,
log.Fatalf("%s is not deployed?", app.Name)
}

chaosVersion := config.CHAOS_DEFAULT
if deployMeta.IsChaos {
chaosVersion = deployMeta.ChaosVersion
}

toWriteVersion := deployMeta.Version
if deployMeta.IsChaos {
toWriteVersion = chaosVersion
}

if err := internal.UndeployOverview(
if err := internal.DeployOverview(
app,
deployMeta.Version,
chaosVersion,
toWriteVersion,
config.NO_DOMAIN_DEFAULT,
"",
nil,
); err != nil {
log.Fatal(err)
}
@@ -87,7 +78,7 @@ Passing "--prune/-p" does not remove those volumes.`,
}
}

if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil {
if err := app.WriteRecipeVersion(deployMeta.Version, false); err != nil {
log.Fatalf("writing recipe version failed: %s", err)
}
},
@@ -43,7 +43,8 @@ beforehand. See "abra app backup" for more.`,
ValidArgsFunction: func(
cmd *cobra.Command,
args []string,
toComplete string) ([]string, cobra.ShellCompDirective) {
toComplete string,
) ([]string, cobra.ShellCompDirective) {
switch l := len(args); l {
case 0:
return autocomplete.AppNameComplete()
@@ -183,7 +184,9 @@ beforehand. See "abra app backup" for more.`,
appPkg.ExposeAllEnv(stackName, compose, app.Env)
appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
appPkg.SetChaosVersionLabel(compose, stackName, chosenUpgrade)
if internal.Chaos {
appPkg.SetChaosVersionLabel(compose, stackName, chosenUpgrade)
}
appPkg.SetUpdateLabel(compose, stackName, app.Env)

envVars, err := appPkg.CheckEnv(app)
@@ -204,23 +207,21 @@ beforehand. See "abra app backup" for more.`,
return
}

chaosVersion := config.CHAOS_DEFAULT
if deployMeta.IsChaos {
chaosVersion = deployMeta.ChaosVersion

if deployMeta.ChaosVersion == "" {
chaosVersion = config.UNKNOWN_DEFAULT
}
if upgradeReleaseNotes != "" && chosenUpgrade != "" {
fmt.Print(upgradeReleaseNotes)
} else {
upgradeWarnMessages = append(
upgradeWarnMessages,
fmt.Sprintf("no release notes available for %s", chosenUpgrade),
)
}

if err := internal.NewVersionOverview(
if err := internal.DeployOverview(
app,
upgradeWarnMessages,
"upgrade",
deployMeta.Version,
chaosVersion,
chosenUpgrade,
upgradeReleaseNotes,
upgradeWarnMessages,
); err != nil {
log.Fatal(err)
}
@@ -363,7 +364,7 @@ func validateUpgradeVersionArg(

parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version)
if err != nil {
return err
return fmt.Errorf("'%s' is not a known version", deployMeta.Version)
}

if parsedSpecificVersion.IsLessThan(parsedDeployedVersion) &&
@@ -395,9 +396,7 @@ func ensureDeployed(cl *dockerClient.Client, app app.App) (stack.DeployMeta, err
return deployMeta, nil
}

var (
showReleaseNotes bool
)
var showReleaseNotes bool

func init() {
AppUpgradeCommand.Flags().BoolVarP(
@@ -25,6 +25,11 @@ var CatalogueGenerateCommand = &cobra.Command{
Short: "Generate the recipe catalogue",
Long: `Generate a new copy of the recipe catalogue.

N.B. this command **will** wipe local unstaged changes from your local recipes
if present. "--chaos/-C" on this command refers to the catalogue repository
("$ABRA_DIR/catalogue") and not the recipes. Please take care not to lose your
changes.

It is possible to generate new metadata for a single recipe by passing
[recipe]. The existing local catalogue will be updated, not overwritten.
@@ -12,17 +12,16 @@ var AutocompleteCommand = &cobra.Command{
Long: `To load completions:

Bash:

# Load autocompletion for the current Bash session
$ source <(abra autocomplete bash)

# To load autocompletion for each session, execute once:
# Linux:
$ abra autocomplete bash > /etc/bash_completion.d/abra
$ abra autocomplete bash | sudo tee /etc/bash_completion.d/abra
# macOS:
$ abra autocomplete bash > $(brew --prefix)/etc/bash_completion.d/abra
$ abra autocomplete bash | sudo tee $(brew --prefix)/etc/bash_completion.d/abra

Zsh:

# If shell autocompletion is not already enabled in your environment,
# you will need to enable it. You can execute the following once:

@@ -34,14 +33,12 @@ Zsh:
# You will need to start a new shell for this setup to take effect.

fish:

$ abra autocomplete fish | source

# To load autocompletions for each session, execute once:
$ abra autocomplete fish > ~/.config/fish/completions/abra.fish

PowerShell:

PS> abra autocomplete powershell | Out-String | Invoke-Expression

# To load autocompletions for every new session, run:
@@ -12,6 +12,7 @@ import (
"coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters"
dockerClient "github.com/docker/docker/client"
)
@@ -47,7 +48,7 @@ func RunBackupCmdRemote(
backupCmd string,
containerID string,
execEnv []string) (io.Writer, error) {
execBackupListOpts := types.ExecConfig{
execBackupListOpts := containertypes.ExecOptions{
AttachStderr: true,
AttachStdin: true,
AttachStdout: true,
@@ -14,7 +14,7 @@ import (
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters"
dockerClient "github.com/docker/docker/client"
"github.com/docker/docker/pkg/archive"
@@ -42,7 +42,7 @@ func RunCmdRemote(
return err
}

copyOpts := types.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
copyOpts := containertypes.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
if err := cl.CopyToContainer(context.Background(), targetContainer.ID, "/tmp", content, copyOpts); err != nil {
return err
}
@@ -55,7 +55,7 @@ func RunCmdRemote(

shell := "/bin/bash"
findShell := []string{"test", "-e", shell}
execCreateOpts := types.ExecConfig{
execCreateOpts := containertypes.ExecOptions{
AttachStderr: true,
AttachStdin: true,
AttachStdout: true,
@@ -10,7 +10,6 @@ import (
"coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter"
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/recipe"
"coopcloud.tech/tagcmp"
"github.com/AlecAivazis/survey/v2"
"github.com/charmbracelet/lipgloss"
@@ -38,100 +37,21 @@ func horizontal(left, mid, right string) string {
return lipgloss.JoinHorizontal(lipgloss.Left, left, mid, right)
}

// NewVersionOverview shows an upgrade or downgrade overview
func NewVersionOverview(
app appPkg.App,
warnMessages []string,
kind,
deployedVersion,
deployedChaosVersion,
toDeployVersion,
releaseNotes string) error {
deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
deployConfig = composeFiles
}

server := app.Server
if app.Server == "default" {
server = "local"
}

domain := app.Domain
if domain == "" {
domain = config.NO_DOMAIN_DEFAULT
}

upperKind := strings.ToUpper(kind)

rows := [][]string{
{"DOMAIN", domain},
{"RECIPE", app.Recipe.Name},
{"SERVER", server},
{"CONFIG", deployConfig},

{"CURRENT DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)},
{"CHAOS ", formatter.BoldDirtyDefault(deployedChaosVersion)},

{upperKind, "---"},
{"VERSION", formatter.BoldDirtyDefault(toDeployVersion)},

{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Domain)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(app.Recipe.EnvVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
}

overview := formatter.CreateOverview(
fmt.Sprintf("%s OVERVIEW", upperKind),
rows,
)

fmt.Println(overview)

if releaseNotes != "" && toDeployVersion != "" {
fmt.Print(releaseNotes)
} else {
warnMessages = append(
warnMessages,
fmt.Sprintf("no release notes available for %s", toDeployVersion),
)
}

for _, msg := range warnMessages {
log.Warn(msg)
}

if NoInput {
return nil
}

response := false
prompt := &survey.Confirm{Message: "proceed?"}
if err := survey.AskOne(prompt, &response); err != nil {
return err
}

if !response {
log.Fatal("deployment cancelled")
}

return nil
func formatComposeFiles(composeFiles string) string {
return strings.ReplaceAll(composeFiles, ":", "\n")
}

// DeployOverview shows a deployment overview
func DeployOverview(
app appPkg.App,
warnMessages []string,
deployedVersion string,
deployedChaosVersion string,
toDeployVersion,
toDeployChaosVersion string,
toWriteVersion string,
toDeployVersion string,
info string,
warnMessages []string,
) error {
deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
deployConfig = composeFiles
deployConfig = formatComposeFiles(composeFiles)
}

server := app.Server
@@ -144,21 +64,7 @@ func DeployOverview(
domain = config.NO_DOMAIN_DEFAULT
}

if app.Recipe.Dirty {
toWriteVersion = formatter.AddDirtyMarker(toWriteVersion)
toDeployChaosVersion = formatter.AddDirtyMarker(toDeployChaosVersion)
}

recipeName, exists := app.Env["RECIPE"]
if !exists {
recipeName = app.Env["TYPE"]
}

envVersion, err := recipe.GetEnvVersionRaw(recipeName)
if err != nil {
return err
}

envVersion := app.Recipe.EnvVersionRaw
if envVersion == "" {
envVersion = config.NO_VERSION_DEFAULT
}
@@ -168,24 +74,21 @@ func DeployOverview(
{"RECIPE", app.Recipe.Name},
{"SERVER", server},
{"CONFIG", deployConfig},

{"CURRENT DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)},
{"CHAOS", formatter.BoldDirtyDefault(deployedChaosVersion)},

{"NEW DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
{"CHAOS", formatter.BoldDirtyDefault(toDeployChaosVersion)},

{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Name)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toWriteVersion)},
{"", ""},
{"CURRENT DEPLOYMENT", formatter.BoldDirtyDefault(deployedVersion)},
{"ENV VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW DEPLOYMENT", formatter.BoldDirtyDefault(toDeployVersion)},
}

overview := formatter.CreateOverview("DEPLOY OVERVIEW", rows)
deployType := getDeployType(deployedVersion, toDeployVersion)
overview := formatter.CreateOverview(fmt.Sprintf("%s OVERVIEW", deployType), rows)

fmt.Println(overview)

if info != "" {
fmt.Println(info)
}

for _, msg := range warnMessages {
log.Warn(msg)
}
@@ -207,76 +110,34 @@ func DeployOverview(
return nil
}

// UndeployOverview shows an undeployment overview
func UndeployOverview(
app appPkg.App,
deployedVersion,
deployedChaosVersion,
toWriteVersion string,
) error {
deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
deployConfig = composeFiles
func getDeployType(currentVersion, newVersion string) string {
if newVersion == config.NO_DOMAIN_DEFAULT {
return "UNDEPLOY"
}

server := app.Server
if app.Server == "default" {
server = "local"
if strings.Contains(newVersion, "+U") {
return "CHAOS DEPLOY"
}

domain := app.Domain
if domain == "" {
domain = config.NO_DOMAIN_DEFAULT
if strings.Contains(currentVersion, "+U") {
return "UNCHAOS DEPLOY"
}

recipeName, exists := app.Env["RECIPE"]
if !exists {
recipeName = app.Env["TYPE"]
if currentVersion == newVersion {
return "REDEPLOY"
}

envVersion, err := recipe.GetEnvVersionRaw(recipeName)
if currentVersion == config.NO_VERSION_DEFAULT {
return "NEW DEPLOY"
}
currentParsed, err := tagcmp.Parse(currentVersion)
if err != nil {
return err
return "DEPLOY"
}

if envVersion == "" {
envVersion = config.NO_VERSION_DEFAULT
newParsed, err := tagcmp.Parse(newVersion)
if err != nil {
return "DEPLOY"
}

rows := [][]string{
{"DOMAIN", domain},
{"RECIPE", app.Recipe.Name},
{"SERVER", server},
{"CONFIG", deployConfig},

{"CURRENT DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)},
{"CHAOS", formatter.BoldDirtyDefault(deployedChaosVersion)},

{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Name)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toWriteVersion)},
if currentParsed.IsLessThan(newParsed) {
return "UPGRADE"
}

overview := formatter.CreateOverview("UNDEPLOY OVERVIEW", rows)

fmt.Println(overview)

if NoInput {
return nil
}

response := false
prompt := &survey.Confirm{Message: "proceed?"}
if err := survey.AskOne(prompt, &response); err != nil {
return err
}

if !response {
log.Fatal("undeploy cancelled")
}

return nil
return "DOWNGRADE"
}

// PostCmds parses a string of commands and executes them inside of the respective services
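Note: for orientation, here are the headings the new getDeployType above would produce for a few sample transitions, sketched as a small illustrative table. The version strings are invented; the real comparison is done by coopcloud.tech/tagcmp and the config constants.

```go
// Illustrative only; not part of the diff. Keys are {currentVersion, newVersion}
// pairs, values are the overview heading getDeployType would pick for them.
package example

var deployTypeExamples = map[[2]string]string{
	{"1.2.0+3.1", "1.3.0+3.2"}: "UPGRADE",      // new version parses greater
	{"1.3.0+3.2", "1.2.0+3.1"}: "DOWNGRADE",    // new version parses lower
	{"1.2.0+3.1", "1.2.0+3.1"}: "REDEPLOY",     // same version deployed again
	{"1.2.0+3.1", "abc1234+U"}: "CHAOS DEPLOY", // "+U" marks a chaos version
}
```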
@@ -267,6 +267,8 @@ func addReleaseNotes(recipe recipe.Recipe, tag string) error {
return err
}

var addNextAsReleaseNotes bool

nextReleaseNotePath := path.Join(releaseDir, "next")
if _, err := os.Stat(nextReleaseNotePath); err == nil {
// release/next note exists. Move it to release/<tag>
@@ -276,38 +278,37 @@ func addReleaseNotes(recipe recipe.Recipe, tag string) error {
}

if !internal.NoInput {
prompt := &survey.Input{
prompt := &survey.Confirm{
Message: "Use release note in release/next?",
}
var addReleaseNote bool
if err := survey.AskOne(prompt, &addReleaseNote); err != nil {

if err := survey.AskOne(prompt, &addNextAsReleaseNotes); err != nil {
return err
}
if !addReleaseNote {

if !addNextAsReleaseNotes {
return nil
}
}

err := os.Rename(nextReleaseNotePath, tagReleaseNotePath)
if err != nil {
if err := os.Rename(nextReleaseNotePath, tagReleaseNotePath); err != nil {
return err
}

err = gitPkg.Add(recipe.Dir, path.Join("release", "next"), internal.Dry)
if err != nil {
if err := gitPkg.Add(recipe.Dir, path.Join("release", "next"), internal.Dry); err != nil {
return err
}

err = gitPkg.Add(recipe.Dir, path.Join("release", tag), internal.Dry)
if err != nil {
if err := gitPkg.Add(recipe.Dir, path.Join("release", tag), internal.Dry); err != nil {
return err
}
} else if !errors.Is(err, os.ErrNotExist) {
return err
}

// No release note exists for the current release.
if internal.NoInput {
// NOTE(d1): No release note exists for the current release. Or, we've
// already used release/next as the release note
if internal.NoInput || addNextAsReleaseNotes {
return nil
}
@@ -51,6 +51,10 @@ func Run(version, commit string) {
log.Logger.SetStyles(charmLog.DefaultStyles())
charmLog.SetDefault(log.Logger)

if internal.MachineReadable {
log.SetOutput(os.Stderr)
}

if internal.Debug {
log.SetLevel(log.DebugLevel)
log.SetOutput(os.Stderr)
go.mod (106 changed lines)

@@ -1,6 +1,6 @@
module coopcloud.tech/abra

go 1.22.7
go 1.23.0

toolchain go1.23.1

@@ -8,21 +8,21 @@ require (
coopcloud.tech/tagcmp v0.0.0-20230809071031-eb3e7758d4eb
git.coopcloud.tech/toolshed/godotenv v1.5.2-0.20250103171850-4d0ca41daa5c
github.com/AlecAivazis/survey/v2 v2.3.7
github.com/charmbracelet/lipgloss v1.0.0
github.com/charmbracelet/log v0.4.0
github.com/charmbracelet/lipgloss v1.1.0
github.com/charmbracelet/log v0.4.1
github.com/distribution/reference v0.6.0
github.com/docker/cli v27.4.1+incompatible
github.com/docker/docker v27.4.1+incompatible
github.com/docker/cli v28.0.1+incompatible
github.com/docker/docker v28.0.1+incompatible
github.com/docker/go-units v0.5.0
github.com/go-git/go-git/v5 v5.13.1
github.com/google/go-cmp v0.6.0
github.com/go-git/go-git/v5 v5.14.0
github.com/google/go-cmp v0.7.0
github.com/moby/sys/signal v0.7.1
github.com/moby/term v0.5.0
github.com/moby/term v0.5.2
github.com/pkg/errors v0.9.1
github.com/schollz/progressbar/v3 v3.17.1
golang.org/x/term v0.27.0
github.com/schollz/progressbar/v3 v3.18.0
golang.org/x/term v0.30.0
gopkg.in/yaml.v3 v3.0.1
gotest.tools/v3 v3.5.1
gotest.tools/v3 v3.5.2
)

require (
@@ -31,16 +31,19 @@ require (
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/BurntSushi/toml v1.4.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charmbracelet/x/ansi v0.6.0 // indirect
github.com/cloudflare/circl v1.5.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cloudflare/circl v1.6.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
github.com/cyphar/filepath-securejoin v0.3.6 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/docker/distribution v2.8.3+incompatible // indirect
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect
@@ -52,7 +55,7 @@ require (
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.1 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
@@ -60,15 +63,15 @@ require (
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
@@ -81,49 +84,50 @@ require (
github.com/moby/sys/user v0.3.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/runc v1.1.13 // indirect
github.com/opencontainers/runtime-spec v1.1.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pjbgf/sha1cd v0.3.1 // indirect
github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.61.0 // indirect
github.com/prometheus/common v0.63.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 // indirect
go.opentelemetry.io/otel v1.33.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect
go.opentelemetry.io/otel v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 // indirect
go.opentelemetry.io/otel/metric v1.33.0 // indirect
go.opentelemetry.io/otel/sdk v1.33.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.33.0 // indirect
go.opentelemetry.io/otel/trace v1.33.0 // indirect
go.opentelemetry.io/proto/otlp v1.4.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 // indirect
golang.org/x/mod v0.22.0 // indirect
golang.org/x/net v0.33.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.8.0 // indirect
golang.org/x/tools v0.28.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d // indirect
google.golang.org/grpc v1.69.2 // indirect
google.golang.org/protobuf v1.36.1 // indirect
go.opentelemetry.io/otel/metric v1.35.0 // indirect
go.opentelemetry.io/otel/sdk v1.35.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect
go.opentelemetry.io/otel/trace v1.35.0 // indirect
go.opentelemetry.io/proto/otlp v1.5.0 // indirect
golang.org/x/crypto v0.36.0 // indirect
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.37.0 // indirect
golang.org/x/sync v0.12.0 // indirect
golang.org/x/text v0.23.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.31.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)
@@ -132,19 +136,19 @@ require (
github.com/containers/image v3.0.2+incompatible
github.com/containers/storage v1.38.2 // indirect
github.com/decentral1se/passgen v1.0.1
github.com/docker/docker-credential-helpers v0.8.2 // indirect
github.com/docker/docker-credential-helpers v0.9.3 // indirect
github.com/fvbommel/sortorder v1.1.0 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/mux v1.8.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/opencontainers/image-spec v1.1.0 // indirect
github.com/prometheus/client_golang v1.20.5 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/prometheus/client_golang v1.21.1 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/spf13/cobra v1.8.1
github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0
github.com/theupdateframework/notary v0.7.0 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
golang.org/x/sys v0.28.0
golang.org/x/sys v0.31.0
)
go.sum (135 changed lines)

@@ -81,6 +81,8 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDe
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk=
github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
@@ -131,18 +133,31 @@ github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInq
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg=
github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/log v0.4.0 h1:G9bQAcx8rWA2T3pWvx7YtPTPwgqpk7D68BX21IRW8ZM=
github.com/charmbracelet/log v0.4.0/go.mod h1:63bXt/djrizTec0l11H20t8FDSvA4CRZJ1KH22MdptM=
github.com/charmbracelet/log v0.4.1 h1:6AYnoHKADkghm/vt4neaNEXkxcXLSV2g1rdyFDOpTyk=
github.com/charmbracelet/log v0.4.1/go.mod h1:pXgyTsqsVu4N9hGdHmQ0xEA4RsXof402LX9ZgiITn2I=
github.com/charmbracelet/x/ansi v0.6.0 h1:qOznutrb93gx9oMiGf7caF7bqqubh6YIM0SWKyA08pA=
github.com/charmbracelet/x/ansi v0.6.0/go.mod h1:KBUFw1la39nl0dLl10l5ORDAqGXaeurTQmwyyVKse/Q=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a h1:G99klV19u0QnhiizODirwVksQB91TJKV/UaTnACcG30=
github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=
github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M=
github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E=
@@ -162,6 +177,8 @@ github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 h1:lkAMpLVBDaj17e
github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA=
github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys=
github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk=
github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
@@ -287,6 +304,8 @@ github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2
github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM=
github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ=
github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s=
github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8=
@@ -307,6 +326,8 @@ github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyG
github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v27.4.1+incompatible h1:VzPiUlRJ/xh+otB75gva3r05isHMo5wXDfPRi5/b4hI=
github.com/docker/cli v27.4.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v28.0.1+incompatible h1:g0h5NQNda3/CxIsaZfH4Tyf6vpxFth7PYl3hgCPOKzs=
github.com/docker/cli v28.0.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
@@ -315,9 +336,13 @@ github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4Kfc
github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v27.4.1+incompatible h1:ZJvcY7gfwHn1JF48PfbyXg7Jyt9ZCWDW+GGXOIxEwp4=
github.com/docker/docker v27.4.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v28.0.1+incompatible h1:FCHjSRdXhNRFjlHMTv4jUNlIBbTeRjrWfeFuJp7jpo0=
github.com/docker/docker v28.0.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y=
github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8=
github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
@@ -342,6 +367,7 @@ github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae/go.mod h1:7Bv
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v1.2.3 h1:xwIyKHbaP5yfT6O9KIeYJR5549MXRQkoQMRXGztz8YQ=
github.com/elazarl/goproxy v1.2.3/go.mod h1:YfEbZtqP4AetfO6d40vWchF3znWX7C7Vd6ZMfdL8z64=
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
@@ -378,10 +404,14 @@ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66D
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.6.1 h1:u+dcrgaguSSkbjzHwelEjc0Yj300NUevrrPphk/SoRA=
github.com/go-git/go-billy/v5 v5.6.1/go.mod h1:0AsLr1z2+Uksi4NlElmMblP5rPcDZNRCD8ujZCRR2BE=
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
github.com/go-git/go-git/v5 v5.13.1 h1:DAQ9APonnlvSWpvolXWIuV6Q6zXy2wHbN4cVlNR5Q+M=
github.com/go-git/go-git/v5 v5.13.1/go.mod h1:qryJB4cSBoq3FRoBRf5A77joojuBcmPJ0qu3XXXVixc=
github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60=
github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -480,6 +510,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0=
github.com/google/go-intervals v0.0.2/go.mod h1:MkaR3LNRfeKLPmqgJYs4E66z5InYjmCjbbr4TQlcT6Y=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -519,9 +551,12 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 h1:VNqngBF40hVlDloBruUehVYC3ArSgIyScOAyMRqBxRg=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1/go.mod h1:RBRO7fro65R6tjKzYgLAFo0t1QEXY1Dp+i/bvpRiqiQ=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -585,6 +620,8 @@ github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdY
|
||||
github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
@ -618,6 +655,8 @@ github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaO
|
||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
@ -673,6 +712,8 @@ github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcY
|
||||
github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo=
|
||||
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
|
||||
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
@ -683,6 +724,8 @@ github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7P
|
||||
github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ=
|
||||
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
|
||||
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
@ -721,6 +764,8 @@ github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zM
|
||||
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
|
||||
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
|
||||
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
|
||||
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||
github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
|
||||
github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
|
||||
github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
|
||||
@ -752,6 +797,8 @@ github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCko
|
||||
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
|
||||
github.com/pjbgf/sha1cd v0.3.1 h1:Dh2GYdpJnO84lIw0LJwTFXjcNbasP/bklicSznyAaPI=
|
||||
github.com/pjbgf/sha1cd v0.3.1/go.mod h1:Y8t7jSB/dEI/lQE04A1HVKteqjj9bX5O4+Cex0TCu8s=
|
||||
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
|
||||
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
@ -769,6 +816,8 @@ github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQ
|
||||
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
|
||||
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
|
||||
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
|
||||
github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk=
|
||||
github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg=
|
||||
github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
@ -784,6 +833,8 @@ github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+
|
||||
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
|
||||
github.com/prometheus/common v0.61.0 h1:3gv/GThfX0cV2lpO7gkTUwZru38mxevy90Bj8YFSRQQ=
|
||||
github.com/prometheus/common v0.61.0/go.mod h1:zr29OCN/2BsJRaFwG8QOBr41D6kkchKbpeNH7pAjb/s=
|
||||
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
|
||||
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
|
||||
github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
@ -806,6 +857,9 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
|
||||
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
@ -813,6 +867,8 @@ github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiB
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/schollz/progressbar/v3 v3.17.1 h1:bI1MTaoQO+v5kzklBjYNRQLoVpe0zbyRZNK6DFkVC5U=
|
||||
github.com/schollz/progressbar/v3 v3.17.1/go.mod h1:RzqpnsPQNjUyIgdglUjRLgD7sVnxN1wpmBMV+UiEbL4=
|
||||
github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA=
|
||||
github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec=
|
||||
github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U=
|
||||
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
|
||||
github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
|
||||
@ -831,6 +887,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY=
|
||||
github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
|
||||
@ -847,6 +905,8 @@ github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3
|
||||
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
|
||||
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
|
||||
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
|
||||
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
|
||||
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
|
||||
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
|
||||
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
@ -857,6 +917,8 @@ github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn
|
||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
|
||||
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
||||
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
||||
@ -911,6 +973,8 @@ github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
||||
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
||||
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
@ -933,27 +997,47 @@ go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJyS
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ=
|
||||
go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw=
|
||||
go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I=
|
||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0 h1:7F29RDmnlqk6B5d+sUqemt8TBfDqxryYW5gX6L74RFA=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0/go.mod h1:ZiGDq7xwDMKmWDrN1XsXAj0iC7hns+2DhxBFSncNHSE=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 h1:QcFwRrZLc82r8wODjvyCbP7Ifp3UANaBSmhDSFjnqSc=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0/go.mod h1:CXIWhUomyWBG/oY2/r/kLp6K/cmx9e/7DLpBuuGdLCA=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 h1:1fTNlAIJZGWLP5FVu0fikVry1IsiUnXjf7QFvoNN3Xw=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0/go.mod h1:zjPK58DtkqQFn+YUMbx0M2XV3QgKU0gS9LeGohREyK4=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 h1:5pojmb1U1AogINhN3SurB+zm/nIcusopeBNp42f45QM=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0/go.mod h1:57gTHJSE5S1tqg+EKsLPlTWhpHMsWlVmer+LA926XiA=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 h1:m639+BofXTvcY1q8CGs4ItwQarYtJPOWmVobfM1HpVI=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0/go.mod h1:LjReUci/F4BUyv+y4dwnq3h/26iNOeC3wAIqgvTIZVo=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU=
|
||||
go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ=
|
||||
go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
||||
go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM=
|
||||
go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM=
|
||||
go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY=
|
||||
go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.33.0 h1:Gs5VK9/WUJhNXZgn8MR6ITatvAmKeIuCtNbsP3JkNqU=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.33.0/go.mod h1:dL5ykHZmm1B1nVRk9dDjChwDmt81MjVp3gLkQRwKf/Q=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w=
|
||||
go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s=
|
||||
go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck=
|
||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg=
|
||||
go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY=
|
||||
go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU/3i4=
|
||||
go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4=
|
||||
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
@ -980,6 +1064,10 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
|
||||
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
@ -992,6 +1080,10 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 h1:9kj3STMvgqy3YA4VQXBrN7925ICMxD5wzMRcgA30588=
|
||||
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c=
|
||||
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
|
||||
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
|
||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
|
||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@ -1016,6 +1108,8 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
|
||||
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
|
||||
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@ -1059,6 +1153,10 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
|
||||
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@ -1078,6 +1176,8 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
|
||||
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@ -1159,11 +1259,19 @@ golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
|
||||
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
@ -1175,6 +1283,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@ -1183,6 +1293,10 @@ golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxb
|
||||
golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
|
||||
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY=
|
||||
golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0=
|
||||
golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
||||
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@ -1230,6 +1344,10 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
|
||||
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
|
||||
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
|
||||
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
|
||||
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
|
||||
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@ -1280,8 +1398,16 @@ google.golang.org/genproto v0.0.0-20200527145253-8367513e4ece/go.mod h1:jDfRM7Fc
|
||||
google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d h1:H8tOf8XM88HvKqLTxe755haY6r1fqqzLbEnfrmLXlSA=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d/go.mod h1:2v7Z7gP2ZUOGsaFyxATQSRoBnKygqVq2Cwnvom7QiqY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4 h1:IFnXJq3UPB3oBREOodn1v1aGQeZYQclEmvWRMN0PSsY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:c8q6Z6OCqnfVIqUFJkCzKcrj8eCvUrz+K4KRzSTuANg=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d h1:xJJRGY7TJcvIlpSrN3K6LAWgNFUILlO+OMAqtg9aqnw=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 h1:3UsHvIr4Wc2aW4brOaSCmcxh9ksica6fHEr8P1XhkYw=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I=
|
||||
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
|
||||
google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
@ -1303,6 +1429,8 @@ google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG
|
||||
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
|
||||
google.golang.org/grpc v1.69.2 h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU=
|
||||
google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4=
|
||||
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
|
||||
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
@ -1318,6 +1446,10 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
|
||||
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU=
|
||||
google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII=
|
||||
@ -1357,11 +1489,14 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
|
||||
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
|
||||
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
|
||||
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
|
||||
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
|
||||
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
|
||||
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
||||
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
|
@ -655,19 +655,6 @@ func (a App) WriteRecipeVersion(version string, dryRun bool) error {

splitted := strings.Split(line, ":")

if a.Recipe.Dirty {
dirtyVersion = fmt.Sprintf("%s%s", version, config.DIRTY_DEFAULT)
if strings.Contains(line, dirtyVersion) {
skipped = true
lines = append(lines, line)
continue
}

line = fmt.Sprintf("%s:%s", splitted[0], dirtyVersion)
lines = append(lines, line)
continue
}

line = fmt.Sprintf("%s:%s", splitted[0], version)
lines = append(lines, line)
}
@ -223,16 +223,4 @@ func TestWriteRecipeVersionOverwrite(t *testing.T) {
}

assert.Equal(t, "foo", app.Recipe.EnvVersion)

app.Recipe.Dirty = true
if err := app.WriteRecipeVersion("foo+U", false); err != nil {
t.Fatal(err)
}

app, err = appPkg.GetApp(testPkg.ExpectedAppFiles, testPkg.AppName)
if err != nil {
t.Fatal(err)
}

assert.Equal(t, "foo+U", app.Recipe.EnvVersion)
}
@ -44,6 +44,16 @@ func SetChaosVersionLabel(compose *composetypes.Config, stackName string, chaosV
}
}

func SetVersionLabel(compose *composetypes.Config, stackName string, version string) {
for _, service := range compose.Services {
if service.Name == "app" {
log.Debugf("set label 'coop-cloud.%s.version' to %v for %s", stackName, version, stackName)
labelKey := fmt.Sprintf("coop-cloud.%s.version", stackName)
service.Deploy.Labels[labelKey] = version
}
}
}

// SetUpdateLabel adds env ENABLE_AUTO_UPDATE as label to enable/disable the
// auto update process for this app. The default if this variable is not set is to disable
// the auto update process.
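
For reference, a minimal caller sketch of the new SetVersionLabel helper (not part of this changeset; the package alias, compose value and version string are illustrative):

    // Hypothetical usage: stamp the "app" service with the resolved recipe
    // version before the compose config is converted and deployed.
    appPkg.SetVersionLabel(compose, "myapp_example_com", "2.1.0+1.2.3")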
@ -192,7 +192,7 @@ func TestEnvVarCommentsRemoved(t *testing.T) {

envVar, exists = envSample["SECRET_TEST_PASS_TWO_VERSION"]
if !exists {
t.Fatal("WITH_COMMENT env var should be present in .env.sample")
t.Fatal("SECRET_TEST_PASS_TWO_VERSION env var should be present in .env.sample")
}

if strings.Contains(envVar, "length") {
@ -4,11 +4,14 @@ import (
"fmt"
"os"
"slices"
"sort"
"strings"

"coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter"
gitPkg "coopcloud.tech/abra/pkg/git"
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/tagcmp"
"github.com/distribution/reference"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
@ -43,6 +46,9 @@ func (r Recipe) Ensure(ctx EnsureContext) error {

if r.EnvVersion != "" && !ctx.IgnoreEnvVersion {
log.Debugf("ensuring env version %s", r.EnvVersion)
if strings.Contains(r.EnvVersion, "+U") {
log.Fatalf("can not redeploy chaos version (%s) without --chaos", r.EnvVersion)
}

if _, err := r.EnsureVersion(r.EnvVersion); err != nil {
return err
@ -272,19 +278,14 @@ func (r Recipe) EnsureUpToDate() error {
return nil
}

// IsDirty checks whether a recipe is dirty or not. N.B., if you call IsDirty
// from another Recipe method, you should propagate the pointer reference (*).
func (r *Recipe) IsDirty() error {
// IsDirty checks whether a recipe is dirty or not.
func (r *Recipe) IsDirty() (bool, error) {
isClean, err := gitPkg.IsClean(r.Dir)
if err != nil {
return err
return false, err
}

if !isClean {
r.Dirty = true
}

return nil
return !isClean, nil
}

// ChaosVersion constructs a chaos mode recipe version.
@ -298,8 +299,12 @@ func (r *Recipe) ChaosVersion() (string, error) {

version = formatter.SmallSHA(head.String())

if err := r.IsDirty(); err != nil {
return version, err
dirty, err := r.IsDirty()
if err != nil {
return "", err
}
if dirty {
return fmt.Sprintf("%s%s", version, config.DIRTY_DEFAULT), nil
}

return version, nil
@ -345,6 +350,18 @@ func (r Recipe) Tags() ([]string, error) {
return tags, err
}

sort.Slice(tags, func(i, j int) bool {
version1, err := tagcmp.Parse(tags[i])
if err != nil {
return false
}
version2, err := tagcmp.Parse(tags[j])
if err != nil {
return false
}
return version1.IsLessThan(version2)
})

log.Debugf("detected %s as tags for recipe %s", strings.Join(tags, ", "), r.Name)

return tags, nil
@ -15,10 +15,6 @@ func TestIsDirty(t *testing.T) {
t.Fatal(err)
}

if err := r.IsDirty(); err != nil {
t.Fatal(err)
}

assert.False(t, r.Dirty)

fpath := filepath.Join(r.Dir, "foo.txt")
@ -31,9 +27,10 @@ func TestIsDirty(t *testing.T) {
os.Remove(fpath)
})

if err := r.IsDirty(); err != nil {
dirty, err := r.IsDirty()
if err != nil {
t.Fatal(err)
}

assert.True(t, r.Dirty)
assert.True(t, dirty)
}
@ -12,6 +12,8 @@ import (
"strconv"
"strings"

"github.com/go-git/go-git/v5"

"coopcloud.tech/abra/pkg/catalogue"
"coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter"
@ -20,7 +22,6 @@ import (
"coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/web"
"coopcloud.tech/tagcmp"
"github.com/go-git/go-git/v5"
)

// RecipeCatalogueURL is the only current recipe catalogue available.
@ -119,22 +120,9 @@ type Features struct {
SSO string `json:"sso"`
}

func GetEnvVersionRaw(name string) (string, error) {
var version string

if strings.Contains(name, ":") {
split := strings.Split(name, ":")
if len(split) > 2 {
return version, fmt.Errorf("version seems invalid: %s", name)
}
version = split[1]
}

return version, nil
}

func Get(name string) Recipe {
version := ""
versionRaw := ""
if strings.Contains(name, ":") {
split := strings.Split(name, ":")
if len(split) > 2 {
@ -143,6 +131,7 @@ func Get(name string) Recipe {
name = split[0]

version = split[1]
versionRaw = version
if strings.HasSuffix(version, config.DIRTY_DEFAULT) {
version = strings.Replace(split[1], config.DIRTY_DEFAULT, "", 1)
log.Debugf("removed dirty suffix from .env version: %s -> %s", split[1], version)
@ -167,11 +156,12 @@ func Get(name string) Recipe {
dir := path.Join(config.RECIPES_DIR, escapeRecipeName(name))

r := Recipe{
Name: name,
EnvVersion: version,
Dir: dir,
GitURL: gitURL,
SSHURL: sshURL,
Name: name,
EnvVersion: version,
EnvVersionRaw: versionRaw,
Dir: dir,
GitURL: gitURL,
SSHURL: sshURL,

ComposePath: path.Join(dir, "compose.yml"),
ReadmePath: path.Join(dir, "README.md"),
@ -179,20 +169,23 @@ func Get(name string) Recipe {
AbraShPath: path.Join(dir, "abra.sh"),
}

if err := r.IsDirty(); err != nil && !errors.Is(err, git.ErrRepositoryNotExists) {
dirty, err := r.IsDirty()
if err != nil && !errors.Is(err, git.ErrRepositoryNotExists) {
log.Fatalf("failed to check git status of %s: %s", r.Name, err)
}
r.Dirty = dirty

return r
}

type Recipe struct {
Name string
EnvVersion string
Dirty bool // NOTE(d1): git terminology for unstaged changes
Dir string
GitURL string
SSHURL string
Name string
EnvVersion string
EnvVersionRaw string
Dirty bool // NOTE(d1): git terminology for unstaged changes
Dir string
GitURL string
SSHURL string

ComposePath string
ReadmePath string
@ -34,6 +34,7 @@ func TestGet(t *testing.T) {
recipe: Recipe{
Name: "foo",
EnvVersion: "1.2.3",
EnvVersionRaw: "1.2.3",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/foo"),
GitURL: "https://git.coopcloud.tech/coop-cloud/foo.git",
SSHURL: "ssh://git@git.coopcloud.tech:2222/coop-cloud/foo.git",
@ -61,6 +62,22 @@ func TestGet(t *testing.T) {
recipe: Recipe{
Name: "mygit.org/myorg/cool-recipe",
EnvVersion: "1.2.4",
EnvVersionRaw: "1.2.4",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/mygit_org_myorg_cool-recipe"),
GitURL: "https://mygit.org/myorg/cool-recipe.git",
SSHURL: "ssh://git@mygit.org/myorg/cool-recipe.git",
ComposePath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/compose.yml"),
ReadmePath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/README.md"),
SampleEnvPath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/.env.sample"),
AbraShPath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/abra.sh"),
},
},
{
name: "mygit.org/myorg/cool-recipe:1e83340e+U",
recipe: Recipe{
Name: "mygit.org/myorg/cool-recipe",
EnvVersion: "1e83340e",
EnvVersionRaw: "1e83340e+U",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/mygit_org_myorg_cool-recipe"),
GitURL: "https://mygit.org/myorg/cool-recipe.git",
SSHURL: "ssh://git@mygit.org/myorg/cool-recipe.git",
@ -105,16 +122,3 @@ func TestGetVersionLabelLocalDoesNotUseTimeoutLabel(t *testing.T) {
assert.NotEqual(t, label, defaultTimeoutLabel)
}
}

func TestDirtyMarkerRemoved(t *testing.T) {
r := Get("abra-test-recipe:1e83340e+U")
assert.Equal(t, "1e83340e", r.EnvVersion)
}

func TestGetEnvVersionRaw(t *testing.T) {
v, err := GetEnvVersionRaw("abra-test-recipe:1e83340e+U")
if err != nil {
t.Fatal(err)
}
assert.Equal(t, "1e83340e+U", v)
}
@ -9,14 +9,14 @@ import (
"coopcloud.tech/abra/pkg/log"
"github.com/docker/cli/cli"
"github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
apiclient "github.com/docker/docker/client"
)

// RunExec runs a command on a remote container. io.Writer corresponds to the
// command output.
func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string,
execConfig *types.ExecConfig) (io.Writer, error) {
execOptions *container.ExecOptions) (io.Writer, error) {
ctx := context.Background()

// We need to check the tty _before_ we do the ContainerExecCreate, because
@ -26,13 +26,13 @@ func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string
if _, err := client.ContainerInspect(ctx, containerID); err != nil {
return nil, err
}
if !execConfig.Detach {
if err := dockerCli.In().CheckTty(execConfig.AttachStdin, execConfig.Tty); err != nil {
if !execOptions.Detach {
if err := dockerCli.In().CheckTty(execOptions.AttachStdin, execOptions.Tty); err != nil {
return nil, err
}
}

response, err := client.ContainerExecCreate(ctx, containerID, *execConfig)
response, err := client.ContainerExecCreate(ctx, containerID, *execOptions)
if err != nil {
return nil, err
}
@ -42,40 +42,40 @@ func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string
return nil, errors.New("exec ID empty")
}

if execConfig.Detach {
execStartCheck := types.ExecStartCheck{
Detach: execConfig.Detach,
Tty: execConfig.Tty,
if execOptions.Detach {
execStartCheck := container.ExecStartOptions{
Detach: execOptions.Detach,
Tty: execOptions.Tty,
}
return nil, client.ContainerExecStart(ctx, execID, execStartCheck)
}
return interactiveExec(ctx, dockerCli, client, execConfig, execID)
return interactiveExec(ctx, dockerCli, client, execOptions, execID)
}

func interactiveExec(ctx context.Context, dockerCli command.Cli, client *apiclient.Client,
execConfig *types.ExecConfig, execID string) (io.Writer, error) {
execOpts *container.ExecOptions, execID string) (io.Writer, error) {
// Interactive exec requested.
var (
out, stderr io.Writer
in io.ReadCloser
)

if execConfig.AttachStdin {
if execOpts.AttachStdin {
in = dockerCli.In()
}
if execConfig.AttachStdout {
if execOpts.AttachStdout {
out = dockerCli.Out()
}
if execConfig.AttachStderr {
if execConfig.Tty {
if execOpts.AttachStderr {
if execOpts.Tty {
stderr = dockerCli.Out()
} else {
stderr = dockerCli.Err()
}
}

execStartCheck := types.ExecStartCheck{
Tty: execConfig.Tty,
execStartCheck := container.ExecStartOptions{
Tty: execOpts.Tty,
}
resp, err := client.ContainerExecAttach(ctx, execID, execStartCheck)
if err != nil {
@ -94,15 +94,15 @@ func interactiveExec(ctx context.Context, dockerCli command.Cli, client *apiclie
outputStream: out,
errorStream: stderr,
resp: resp,
tty: execConfig.Tty,
detachKeys: execConfig.DetachKeys,
tty: execOpts.Tty,
detachKeys: execOpts.DetachKeys,
}

return streamer.stream(ctx)
}()
}()

if execConfig.Tty && dockerCli.In().IsTerminal() {
if execOpts.Tty && dockerCli.In().IsTerminal() {
if err := MonitorTtySize(ctx, client, dockerCli, execID, true); err != nil {
fmt.Fprintln(dockerCli.Err(), "Error monitoring TTY size:", err)
}
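
For reference, a minimal usage sketch of the migrated exec API (not part of this changeset; the helper name, command and attach flags are illustrative). It shows a caller building the new container.ExecOptions, the type that replaces the removed types.ExecConfig, and handing it to the RunExec wrapper above:

    package main

    import (
    	"coopcloud.tech/abra/pkg/upstream/container"
    	"github.com/docker/cli/cli/command"
    	containertypes "github.com/docker/docker/api/types/container"
    	dockerClient "github.com/docker/docker/client"
    )

    // runShell is a hypothetical helper: it attaches an interactive shell to a
    // container through RunExec, using container.ExecOptions for the exec setup.
    func runShell(cli command.Cli, cl *dockerClient.Client, containerID string) error {
    	execOptions := containertypes.ExecOptions{
    		AttachStdin:  true,
    		AttachStdout: true,
    		AttachStderr: true,
    		Tty:          true,
    		Cmd:          []string{"/bin/sh"},
    	}

    	_, err := container.RunExec(cli, cl, containerID, &execOptions)
    	return err
    }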
@ -5,7 +5,6 @@ import (
"strings"

composetypes "github.com/docker/cli/cli/compose/types"
"github.com/docker/docker/api/types"
networktypes "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm"
)
@ -52,13 +51,13 @@ func AddStackLabel(namespace Namespace, labels map[string]string) map[string]str
type networkMap map[string]composetypes.NetworkConfig

// Networks from the compose-file type to the engine API type
func Networks(namespace Namespace, networks networkMap, servicesNetworks map[string]struct{}) (map[string]types.NetworkCreate, []string) {
func Networks(namespace Namespace, networks networkMap, servicesNetworks map[string]struct{}) (map[string]networktypes.CreateOptions, []string) {
if networks == nil {
networks = make(map[string]composetypes.NetworkConfig)
}

externalNetworks := []string{}
result := make(map[string]types.NetworkCreate)
result := make(map[string]networktypes.CreateOptions)
for internalName := range servicesNetworks {
network := networks[internalName]
if network.External.External {
@ -66,7 +65,7 @@ func Networks(namespace Namespace, networks networkMap, servicesNetworks map[str
continue
}

createOpts := types.NetworkCreate{
createOpts := networktypes.CreateOptions{
Labels: AddStackLabel(namespace, network.Labels),
Driver: network.Driver,
Options: network.DriverOpts,
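
For reference, a minimal sketch (not part of this changeset; the network name, driver and label value are illustrative) of creating one of the networks produced by Networks() with network.CreateOptions, the type that replaces types.NetworkCreate in the v28 client:

    package main

    import (
    	"context"

    	networktypes "github.com/docker/docker/api/types/network"
    	dockerClient "github.com/docker/docker/client"
    )

    // createStackNetwork is a hypothetical helper: it creates an overlay network
    // from a CreateOptions value, similar to how the stack deploy path consumes
    // the map returned by Networks().
    func createStackNetwork(ctx context.Context, cl *dockerClient.Client, name string) error {
    	createOpts := networktypes.CreateOptions{
    		Driver: "overlay",
    		Labels: map[string]string{"com.docker.stack.namespace": "myapp_example_com"},
    	}

    	_, err := cl.NetworkCreate(ctx, name, createOpts)
    	return err
    }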
@ -4,7 +4,6 @@ import (
"testing"

composetypes "github.com/docker/cli/cli/compose/types"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/network"
"gotest.tools/v3/assert"
is "gotest.tools/v3/assert/cmp"
@ -67,7 +66,7 @@ func TestNetworks(t *testing.T) {
Name: "othername",
},
}
expected := map[string]types.NetworkCreate{
expected := map[string]network.CreateOptions{
"foo_default": {
Labels: map[string]string{
LabelNamespace: "foo",
@ -8,6 +8,7 @@ import (

"coopcloud.tech/abra/pkg/log"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/api/types/versions"
"github.com/docker/docker/client"
@ -99,7 +100,7 @@ func removeServices(
func removeNetworks(
ctx context.Context,
client *apiclient.Client,
networks []types.NetworkResource,
networks []network.Inspect,
) bool {
var hasError bool
for _, network := range networks {
@ -22,6 +22,7 @@ import (
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters"
networktypes "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/api/types/versions"
"github.com/docker/docker/client"
@ -296,7 +297,7 @@ func validateExternalNetworks(ctx context.Context, client dockerClient.NetworkAP
// local-scoped networks, so there's no need to inspect them.
continue
}
network, err := client.NetworkInspect(ctx, networkName, types.NetworkInspectOptions{})
network, err := client.NetworkInspect(ctx, networkName, networktypes.InspectOptions{})
switch {
case dockerClient.IsErrNotFound(err):
return errors.Errorf("network %q is declared as external, but could not be found. You need to create a swarm-scoped network before the stack is deployed, which you can do by running this on the server: docker network create -d overlay proxy", networkName)
@ -353,13 +354,13 @@ func createConfigs(ctx context.Context, cl *dockerClient.Client, configs []swarm
return nil
}

func createNetworks(ctx context.Context, cl *dockerClient.Client, namespace convert.Namespace, networks map[string]types.NetworkCreate) error {
func createNetworks(ctx context.Context, cl *dockerClient.Client, namespace convert.Namespace, networks map[string]networktypes.CreateOptions) error {
existingNetworks, err := getStackNetworks(ctx, cl, namespace.Name())
if err != nil {
return err
}

existingNetworkMap := make(map[string]types.NetworkResource)
existingNetworkMap := make(map[string]networktypes.Inspect)
for _, network := range existingNetworks {
existingNetworkMap[network.Name] = network
}
@ -473,8 +474,8 @@ func deployServices(
return serviceIDs, nil
}

func getStackNetworks(ctx context.Context, dockerclient client.APIClient, namespace string) ([]types.NetworkResource, error) {
return dockerclient.NetworkList(ctx, types.NetworkListOptions{Filters: getStackFilter(namespace)})
func getStackNetworks(ctx context.Context, dockerclient client.APIClient, namespace string) ([]networktypes.Inspect, error) {
return dockerclient.NetworkList(ctx, networktypes.ListOptions{Filters: getStackFilter(namespace)})
}

func getStackSecrets(ctx context.Context, dockerclient client.APIClient, namespace string) ([]swarm.Secret, error) {
@ -50,6 +50,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -62,6 +65,9 @@ teardown(){
|
||||
run $ABRA app check "$TEST_APP_DOMAIN" --chaos
|
||||
assert_success
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
@ -53,6 +53,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -66,6 +69,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'baz'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
@ -24,6 +24,9 @@ teardown(){
|
||||
_rm_remote "/etc/*.txt"
|
||||
|
||||
_rm "$BATS_TMPDIR/mydir"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
||||
@test "validate app argument" {
|
||||
@ -34,6 +37,42 @@ teardown(){
|
||||
assert_failure
|
||||
}
|
||||
|
||||
@test "bail if unstaged changes and no --chaos" {
|
||||
_mkdir "$BATS_TMPDIR/mydir"
|
||||
_mkfile "$BATS_TMPDIR/mydir/myfile.txt" "foo"
|
||||
|
||||
run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app cp "$TEST_APP_DOMAIN" "$BATS_TMPDIR/mydir" app:/etc
|
||||
assert_failure
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
||||
@test "do not bail if unstaged changes and --chaos" {
|
||||
_mkdir "$BATS_TMPDIR/mydir"
|
||||
_mkfile "$BATS_TMPDIR/mydir/myfile.txt" "foo"
|
||||
|
||||
run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app cp "$TEST_APP_DOMAIN" "$BATS_TMPDIR/mydir" app:/etc --chaos
|
||||
assert_success
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
||||
@test "error if missing src/dest arguments" {
|
||||
run $ABRA app cp "$TEST_APP_DOMAIN"
|
||||
assert_failure
|
||||
|
@ -21,8 +21,10 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_reset_recipe
|
||||
_reset_app
|
||||
_undeploy_app
|
||||
_undeploy_app2 "gitea.$TEST_SERVER"
|
||||
|
||||
_reset_app
|
||||
_reset_tags
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
@ -46,6 +48,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" --no-input
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
@ -62,6 +67,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" \
|
||||
--chaos --no-input --no-converge-checks
|
||||
assert_success
|
||||
@ -216,19 +224,6 @@ teardown(){
|
||||
run $ABRA app deploy "gitea.$TEST_SERVER" --no-input --no-converge-checks
|
||||
assert_success
|
||||
assert_output --partial "$latestVersion"
|
||||
|
||||
run $ABRA app undeploy "gitea.$TEST_SERVER" --no-input
|
||||
assert_success
|
||||
|
||||
run $ABRA app secret remove "gitea.$TEST_SERVER" --all --no-input
|
||||
assert_success
|
||||
|
||||
run $ABRA app volume remove "gitea.$TEST_SERVER" --no-input
|
||||
assert_success
|
||||
|
||||
run $ABRA app remove "gitea.$TEST_SERVER" --no-input
|
||||
assert_success
|
||||
assert_not_exists "$ABRA_DIR/servers/$TEST_SERVER/gitea.$TEST_SERVER.env"
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@ -423,3 +418,12 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial "$latestRelease"
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "no chaos version label if no chaos" {
|
||||
_deploy_app
|
||||
|
||||
run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
|
||||
}
|
||||
|
@ -54,13 +54,21 @@ teardown(){
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "chaos commit written to env" {
|
||||
@test "deploy commit written to env and redeploy keeps that version" {
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" "1e83340e" --no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" \
|
||||
--force --no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@ -98,12 +106,15 @@ teardown(){
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "deploy overwrites chaos deploy" {
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" "1e83340e" \
|
||||
--no-input --no-converge-checks
|
||||
@test "takes deployed version when no .env version is present " {
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" "0.1.0+1.20.0" --no-input --no-converge-checks --ignore-env-version
|
||||
assert_success
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
|
||||
run sed -i 's/TYPE=abra-test-recipe:.*/TYPE=abra-test-recipe/g' \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
|
||||
@ -111,7 +122,7 @@ teardown(){
|
||||
--force --no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_failure
|
||||
assert_success
|
||||
}
|
||||
|
@ -20,8 +20,8 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_reset_recipe
|
||||
_reset_app
|
||||
_undeploy_app
|
||||
_reset_app
|
||||
_reset_tags
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
@ -37,17 +37,10 @@ teardown(){
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*N/A'
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.* ' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*N/A'
|
||||
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
|
||||
assert_output --partial 'NEW DEPLOY OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT N/A'
|
||||
assert_output --partial 'ENV VERSION N/A'
|
||||
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -61,17 +54,10 @@ teardown(){
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*N/A'
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.* ' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
|
||||
assert_output --partial 'NEW DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT N/A"
|
||||
assert_output --partial "ENV VERSION ${latestRelease}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -90,17 +76,10 @@ teardown(){
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*N/A'
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.1.1+1.20.2"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.1.1+1.20.2"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.1.1+1.20.2"
|
||||
assert_output --partial 'NEW DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT N/A"
|
||||
assert_output --partial "ENV VERSION 0.1.1+1.20.2"
|
||||
assert_output --partial "NEW DEPLOYMENT 0.1.1+1.20.2"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.1.1+1.20.2" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -120,17 +99,10 @@ teardown(){
|
||||
--no-input --no-converge-checks --ignore-env-version
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*N/A'
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.1.1+1.20.2"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
|
||||
assert_output --partial 'NEW DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT N/A"
|
||||
assert_output --partial "ENV VERSION 0.1.1+1.20.2"
|
||||
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -153,17 +125,10 @@ teardown(){
|
||||
--no-input --no-converge-checks --chaos
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U"
|
||||
assert_output --partial 'CHAOS DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${latestRelease}"
|
||||
assert_output --partial "ENV VERSION ${latestRelease}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
@ -173,7 +138,7 @@ teardown(){
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "chaos deploy then force deploy" {
|
||||
@test "can not redeploy chaos version without --chaos" {
|
||||
headHash=$(_get_head_hash)
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
@ -189,27 +154,12 @@ teardown(){
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${headHash:0:8}+U"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
--no-input --no-converge-checks --force --debug
|
||||
assert_failure
|
||||
assert_output --regexp 'can not redeploy chaos version .*' + "${headHash:0:8}+U"
|
||||
}
|
||||
|
||||
@test "deploy then force chaos commit deploy" {
|
||||
@test "deploy then force commit deploy" {
|
||||
headHash=$(_get_head_hash)
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
@ -225,17 +175,10 @@ teardown(){
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
|
||||
assert_output --partial 'DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${latestRelease}"
|
||||
assert_output --partial "ENV VERSION ${latestRelease}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -250,17 +193,10 @@ teardown(){
|
||||
--no-input --no-converge-checks --chaos
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*N/A'
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
|
||||
assert_output --partial 'NEW DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT N/A"
|
||||
assert_output --partial "ENV VERSION ${latestRelease}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
|
||||
|
||||
run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"'
|
||||
assert_success
|
||||
@ -270,17 +206,28 @@ teardown(){
|
||||
--no-input --no-converge-checks --chaos
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
|
||||
assert_output --partial 'CHAOS DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks --chaos
|
||||
assert_success
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U"
|
||||
assert_output --partial 'CHAOS DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}+U"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks --chaos
|
||||
assert_success
|
||||
|
||||
assert_output --partial 'CHAOS DEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}+U"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -302,19 +249,8 @@ teardown(){
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${headHash:0:8}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
assert_output --partial 'REDEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}"
|
||||
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
|
||||
}
|
||||
|
@ -20,8 +20,11 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_reset_recipe
|
||||
_reset_app
|
||||
_undeploy_app
|
||||
_reset_app
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
||||
@test "validate app argument" {
|
||||
@ -41,6 +44,16 @@ teardown(){
|
||||
}
|
||||
|
||||
@test "show env version despite --chaos" {
|
||||
run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app env "$TEST_APP_DOMAIN"
|
||||
assert_success
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
@ -21,8 +21,8 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_reset_recipe
|
||||
_reset_app
|
||||
_undeploy_app
|
||||
_reset_app
|
||||
_reset_tags
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
@ -46,6 +46,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
|
||||
assert_failure
|
||||
}
|
||||
@ -59,6 +62,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA app labels "$TEST_APP_DOMAIN" --chaos
|
||||
assert_success
|
||||
}
|
||||
|
@ -20,6 +20,10 @@ setup(){
|
||||
teardown(){
|
||||
_rm_app
|
||||
_reset_recipe
|
||||
_reset_tags
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
||||
@test "create new app" {
|
||||
@ -47,25 +51,22 @@ teardown(){
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
|
||||
assert_equal $(_get_tag_hash 0.3.0+1.21.0) $(_get_current_hash)
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.3.0+1.21.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "create new app with chaos commit" {
|
||||
run $ABRA app new "$TEST_RECIPE" 1e83340e \
|
||||
@test "create new app with version commit" {
|
||||
tagHash=$(_get_tag_hash "0.3.0+1.21.0")
|
||||
|
||||
run $ABRA app new "$TEST_RECIPE" "$tagHash" \
|
||||
--no-input \
|
||||
--server "$TEST_SERVER" \
|
||||
--domain "$TEST_APP_DOMAIN"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
|
||||
currentHash=$(_get_current_hash)
|
||||
assert_equal 1e83340e ${currentHash:0:8}
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
|
||||
run grep -q "TYPE=$TEST_RECIPE:${tagHash}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
@ -101,6 +102,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -122,6 +126,13 @@ teardown(){
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" status
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -167,6 +178,8 @@ teardown(){
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "generate secrets" {
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
run $ABRA app new "$TEST_RECIPE" \
|
||||
--no-input \
|
||||
--server "$TEST_SERVER" \
|
||||
@ -178,4 +191,64 @@ teardown(){
|
||||
run $ABRA app secret ls "$TEST_APP_DOMAIN"
|
||||
assert_success
|
||||
assert_output --partial 'test_pass_one'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "app new from chaos recipe" {
|
||||
currentHash=$(_get_current_hash)
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app new "$TEST_RECIPE" \
|
||||
--no-input \
|
||||
--server "$TEST_SERVER" \
|
||||
--domain "$TEST_APP_DOMAIN" \
|
||||
--secrets \
|
||||
--chaos
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_output --partial "version: ${currentHash:0:8}"
|
||||
assert_output --partial "chaos: ${currentHash:0:8}"
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${currentHash:0:8}+U" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "app new, no releases, from chaos recipe" {
|
||||
currentHash=$(_get_current_hash)
|
||||
_remove_tags
|
||||
|
||||
run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
|
||||
run $ABRA app new "$TEST_RECIPE" \
|
||||
--no-input \
|
||||
--server "$TEST_SERVER" \
|
||||
--domain "$TEST_APP_DOMAIN" \
|
||||
--secrets \
|
||||
--chaos
|
||||
assert_success
|
||||
assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_output --partial "version: ${currentHash:0:8}"
|
||||
assert_output --partial "chaos: ${currentHash:0:8}"
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${currentHash:0:8}+U" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
@ -55,6 +55,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -70,6 +73,9 @@ teardown(){
|
||||
run $ABRA app ps --chaos "$TEST_APP_DOMAIN"
|
||||
assert_success
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -117,6 +123,8 @@ teardown(){
|
||||
@test "show ps report" {
|
||||
_deploy_app
|
||||
|
||||
_ensure_env_version "$(_latest_release)"
|
||||
|
||||
run $ABRA app ps "$TEST_APP_DOMAIN"
|
||||
assert_success
|
||||
assert_output --partial 'app'
|
||||
|
@ -20,6 +20,7 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_undeploy_app
|
||||
_reset_app
|
||||
_reset_recipe
|
||||
}
|
||||
|
||||
@ -152,7 +153,7 @@ teardown(){
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "rollback chaos deployment" {
|
||||
@test "rollback chaos deployment is not possible" {
|
||||
tagHash=$(_get_tag_hash "0.2.0+1.21.0")
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash"
|
||||
assert_success
|
||||
@ -162,17 +163,8 @@ teardown(){
|
||||
assert_output --partial "${tagHash:0:8}"
|
||||
|
||||
run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks
|
||||
assert_success
|
||||
assert_output --partial "0.1.1+1.20.2"
|
||||
assert_output --partial "${tagHash:0:8}"
|
||||
|
||||
run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.0+1.20.0" --no-input --no-converge-checks
|
||||
assert_success
|
||||
assert_output --partial "0.1.0+1.20.0"
|
||||
|
||||
tagHash=$(_get_tag_hash "0.1.1+1.20.2")
|
||||
refute_output --partial "${tagHash:0:8}"
|
||||
assert_output --partial "false"
|
||||
assert_failure
|
||||
assert_output --partial 'current deployment' + "${tagHash:0:8}" + 'is not a known version'
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@ -185,3 +177,16 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial "not a known version"
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "no chaos version label if no chaos" {
|
||||
_deploy_app
|
||||
|
||||
run $ABRA app rollback "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
|
||||
}
|
||||
|
@ -20,6 +20,7 @@ setup(){
|
||||
|
||||
teardown(){
|
||||
_undeploy_app
|
||||
_reset_app
|
||||
_reset_recipe
|
||||
}
|
||||
|
||||
@ -32,17 +33,10 @@ teardown(){
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --partial 'DOWNGRADE OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
|
||||
assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.1.0+1.20.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -58,19 +52,33 @@ teardown(){
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --partial 'REDEPLOY OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
|
||||
assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "app rollback no .env version" {
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" "0.2.0+1.21.0" \
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
_wipe_env_version
|
||||
|
||||
run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.0+1.20.0" \
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
assert_output --partial 'DOWNGRADE OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
|
||||
assert_output --partial 'ENV VERSION N/A'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.1.0+1.20.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
@ -41,6 +41,11 @@ teardown(){
|
||||
|
||||
run $ABRA app secret generate "$TEST_APP_DOMAIN"
|
||||
assert_failure
|
||||
assert_output --partial 'missing arguments'
|
||||
|
||||
run $ABRA app secret generate "$TEST_APP_DOMAIN" test_pass_one
|
||||
assert_failure
|
||||
assert_output --partial 'missing arguments'
|
||||
|
||||
run $ABRA app secret generate "$TEST_APP_DOMAIN" testSecret testVersion --all
|
||||
assert_failure
|
||||
@ -131,6 +136,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -271,6 +279,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
@ -319,6 +330,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
@ -92,9 +92,6 @@ teardown(){
|
||||
|
||||
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
|
||||
# NOTE(d1): ensure not chaos undeploy
|
||||
assert_output --partial 'false'
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
|
@ -33,13 +33,10 @@ teardown(){
|
||||
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --partial 'UNDEPLOY OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.1.0+1.20.0'
|
||||
assert_output --partial 'ENV VERSION 0.1.0+1.20.0'
|
||||
assert_output --partial 'NEW DEPLOYMENT N/A'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -57,13 +54,10 @@ teardown(){
|
||||
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
|
||||
assert_output --partial 'UNDEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}"
|
||||
assert_output --partial 'NEW DEPLOYMENT N/A'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -72,7 +66,6 @@ teardown(){
|
||||
|
||||
@test "chaos deploy with unstaged commits and undeploy" {
|
||||
headHash=$(_get_head_hash)
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"'
|
||||
assert_success
|
||||
@ -85,13 +78,10 @@ teardown(){
|
||||
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
|
||||
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U"
|
||||
assert_output --partial 'UNDEPLOY OVERVIEW'
|
||||
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
|
||||
assert_output --partial "ENV VERSION ${headHash:0:8}+U"
|
||||
assert_output --partial 'NEW DEPLOYMENT N/A'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
|
@ -205,7 +205,7 @@ teardown(){
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "upgrade chaos deployment" {
|
||||
@test "upgrade commit deployment not possible" {
|
||||
tagHash=$(_get_tag_hash "0.1.0+1.20.0")
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash"
|
||||
assert_success
|
||||
@ -215,17 +215,8 @@ teardown(){
|
||||
assert_output --partial "${tagHash:0:8}"
|
||||
|
||||
run $ABRA app upgrade "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks
|
||||
assert_success
|
||||
assert_output --partial "0.1.1+1.20.2"
|
||||
assert_output --partial "${tagHash:0:8}"
|
||||
|
||||
run $ABRA app upgrade "$TEST_APP_DOMAIN" "0.2.0+1.21.0" --no-input --no-converge-checks
|
||||
assert_success
|
||||
assert_output --partial "0.2.0+1.21.0"
|
||||
|
||||
tagHash=$(_get_tag_hash "0.1.1+1.20.2")
|
||||
refute_output --partial "${tagHash:0:8}"
|
||||
assert_output --partial "false"
|
||||
assert_failure
|
||||
assert_output --partial "not a known version"
|
||||
}
|
||||
|
||||
@test "chaos commit upgrade not possible" {
|
||||
@ -239,3 +230,16 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial "not a known version"
|
||||
}
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "no chaos version label if no chaos" {
|
||||
_deploy_app
|
||||
|
||||
run $ABRA app upgrade "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
|
||||
assert_success
|
||||
refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
|
||||
}
|
||||
|
@ -31,17 +31,10 @@ teardown(){
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.1.0+1.20.0"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --partial 'UPGRADE OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.1.0+1.20.0'
|
||||
assert_output --partial 'ENV VERSION 0.1.0+1.20.0'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
@ -57,19 +50,35 @@ teardown(){
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
# current deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# new deployment
|
||||
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'CHAOS.*false'
|
||||
|
||||
# env version
|
||||
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
|
||||
assert_output --partial 'REDEPLOY OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
|
||||
assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
||||
@test "app upgrade no .env version" {
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
run $ABRA app deploy "$TEST_APP_DOMAIN" "0.2.0+1.21.0" \
|
||||
--no-input --no-converge-checks
|
||||
assert_success
|
||||
|
||||
_wipe_env_version
|
||||
|
||||
run $ABRA app upgrade "$TEST_APP_DOMAIN" \
|
||||
--no-input --no-converge-checks --force
|
||||
assert_success
|
||||
|
||||
assert_output --partial 'UPGRADE OVERVIEW'
|
||||
assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
|
||||
assert_output --partial 'ENV VERSION N/A'
|
||||
assert_output --partial 'NEW DEPLOYMENT 0.3.1+1.21.0'
|
||||
|
||||
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
|
||||
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
|
||||
assert_success
|
||||
}
|
||||
|
@ -30,6 +30,15 @@ _undeploy_app() {
|
||||
assert_output --partial 'unknown'
|
||||
}
|
||||
|
||||
_undeploy_app2() {
|
||||
run $ABRA app undeploy "$1" --no-input
|
||||
|
||||
run $ABRA app ls --server "$TEST_SERVER" --status
|
||||
assert_success
|
||||
assert_output --partial "$1"
|
||||
assert_output --partial 'unknown'
|
||||
}
|
||||
|
||||
_rm_app() {
|
||||
# NOTE(d1): not asserting outcomes on teardown here since some might fail
|
||||
# depending on what the test created. all commands run through anyway
|
||||
|
@ -38,6 +38,8 @@ _set_git_author() {
|
||||
}
|
||||
|
||||
_git_commit() {
|
||||
_set_git_author
|
||||
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" add .
|
||||
assert_success
|
||||
|
||||
@ -60,3 +62,7 @@ _get_current_hash() {
|
||||
_get_n_hash() {
|
||||
echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" show -s --format="%H" "HEAD~$1")
|
||||
}
|
||||
|
||||
_git_status() {
|
||||
echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" status --porcelain)
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
_latest_release(){
|
||||
echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag -l | tail -n 1)
|
||||
echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag -l --sort=v:refname | tail -n 1)
|
||||
}
|
||||
|
||||
_fetch_recipe() {
|
||||
@ -22,15 +22,6 @@ _reset_recipe(){
|
||||
_fetch_recipe
|
||||
}
|
||||
|
||||
_ensure_latest_version(){
|
||||
latestRelease=$(_latest_release)
|
||||
|
||||
if [ ! $latestRelease = "$1" ]; then
|
||||
echo "expected latest recipe version of '$1', saw: $latestRelease"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
_ensure_catalogue(){
|
||||
if [[ ! -d "$ABRA_DIR/catalogue" ]]; then
|
||||
run git clone https://git.coopcloud.tech/toolshed/recipes-catalogue-json.git $ABRA_DIR/catalogue
|
||||
|
@ -28,8 +28,6 @@ teardown(){
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "install release candidate from script" {
|
||||
skip "current RC is brokenly specified in the installer script"
|
||||
|
||||
run bash -c 'curl https://install.abra.coopcloud.tech | bash -s -- --rc'
|
||||
assert_success
|
||||
|
||||
|
@ -41,6 +41,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA recipe lint "$TEST_RECIPE"
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
@ -58,6 +61,9 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'foo'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run $ABRA recipe lint "$TEST_RECIPE" --chaos
|
||||
assert_success
|
||||
|
||||
|
@ -20,6 +20,7 @@ setup(){
|
||||
|
||||
teardown() {
|
||||
_reset_recipe
|
||||
_reset_tags
|
||||
}
|
||||
|
||||
@test "validate recipe argument" {
|
||||
@ -31,8 +32,6 @@ teardown() {
|
||||
}
|
||||
|
||||
@test "release patch bump" {
|
||||
_ensure_latest_version "0.3.0+1.21.0"
|
||||
|
||||
run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --patch
|
||||
assert_success
|
||||
|
||||
@ -40,6 +39,12 @@ teardown() {
|
||||
assert_success
|
||||
assert_output --partial 'image: nginx:1.21.6'
|
||||
|
||||
# NOTE(d1): ensure the latest tag is the one we expect
|
||||
_remove_tags
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
|
||||
-a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
|
||||
assert_success
|
||||
|
||||
run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
|
||||
assert_success
|
||||
assert_output --partial 'synced label'
|
||||
@ -58,8 +63,6 @@ teardown() {
|
||||
}
|
||||
|
||||
@test "release minor bump" {
|
||||
_ensure_latest_version "0.3.0+1.21.0"
|
||||
|
||||
run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --minor
|
||||
assert_success
|
||||
|
||||
@ -67,6 +70,12 @@ teardown() {
|
||||
assert_success
|
||||
assert_output --regexp 'image: nginx:1.2.*'
|
||||
|
||||
# NOTE(d1): ensure the latest tag is the one we expect
|
||||
_remove_tags
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
|
||||
-a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
|
||||
assert_success
|
||||
|
||||
run $ABRA recipe sync "$TEST_RECIPE" --no-input --minor
|
||||
assert_success
|
||||
assert_output --partial 'synced label'
|
||||
@ -102,8 +111,6 @@ teardown() {
|
||||
}
|
||||
|
||||
@test "release with next release note" {
|
||||
_ensure_latest_version "0.3.0+1.21.0"
|
||||
|
||||
_mkfile "$ABRA_DIR/recipes/$TEST_RECIPE/release/next" "those are some release notes for the next release"
|
||||
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" add release/next
|
||||
|
@ -40,6 +40,9 @@ teardown(){
|
||||
run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
|
||||
assert_success
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "M compose.yml ?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_success
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
@ -58,8 +61,6 @@ teardown(){
|
||||
}
|
||||
|
||||
@test "sync patch label bump" {
|
||||
_ensure_latest_version "0.3.0+1.21.0"
|
||||
|
||||
run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --patch
|
||||
assert_success
|
||||
|
||||
@ -67,6 +68,12 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --partial 'image: nginx:1.21.6'
|
||||
|
||||
# NOTE(d1): ensure the latest tag is the one we expect
|
||||
_remove_tags
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
|
||||
-a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
|
||||
assert_success
|
||||
|
||||
run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
|
||||
assert_success
|
||||
|
||||
@ -76,8 +83,6 @@ teardown(){
|
||||
}
|
||||
|
||||
@test "sync minor label bump" {
|
||||
_ensure_latest_version "0.3.0+1.21.0"
|
||||
|
||||
run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --minor
|
||||
assert_success
|
||||
|
||||
@ -85,6 +90,12 @@ teardown(){
|
||||
assert_success
|
||||
assert_output --regexp 'image: nginx:1.2.*'
|
||||
|
||||
# NOTE(d1): ensure the latest tag is the one we expect
|
||||
_remove_tags
|
||||
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
|
||||
-a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
|
||||
assert_success
|
||||
|
||||
run $ABRA recipe sync "$TEST_RECIPE" --no-input --minor
|
||||
assert_success
|
||||
|
||||
|
@ -54,6 +54,9 @@ teardown(){
|
||||
assert_failure
|
||||
assert_output --partial 'locally unstaged changes'
|
||||
|
||||
assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_equal "$(_git_status)" "?? foo"
|
||||
|
||||
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
|
||||
}
|
||||
|
@ -29,8 +29,6 @@ teardown(){
|
||||
|
||||
# bats test_tags=slow
|
||||
@test "abra upgrade release candidate" {
|
||||
skip "TODO: RC publishing broke somehow, needs investigation"
|
||||
|
||||
run $ABRA upgrade --rc
|
||||
assert_success
|
||||
assert_output --partial 'Public interest infrastructure'
|
||||
|
8
vendor/github.com/ProtonMail/go-crypto/internal/byteutil/byteutil.go
generated
vendored
8
vendor/github.com/ProtonMail/go-crypto/internal/byteutil/byteutil.go
generated
vendored
@ -49,16 +49,16 @@ func ShiftNBytesLeft(dst, x []byte, n int) {
|
||||
dst = append(dst, make([]byte, n/8)...)
|
||||
}
|
||||
|
||||
// XorBytesMut assumes equal input length, replaces X with X XOR Y
|
||||
// XorBytesMut replaces X with X XOR Y. len(X) must be >= len(Y).
|
||||
func XorBytesMut(X, Y []byte) {
|
||||
for i := 0; i < len(X); i++ {
|
||||
for i := 0; i < len(Y); i++ {
|
||||
X[i] ^= Y[i]
|
||||
}
|
||||
}
|
||||
|
||||
// XorBytes assumes equal input length, puts X XOR Y into Z
|
||||
// XorBytes puts X XOR Y into Z. len(Z) and len(X) must be >= len(Y).
|
||||
func XorBytes(Z, X, Y []byte) {
|
||||
for i := 0; i < len(X); i++ {
|
||||
for i := 0; i < len(Y); i++ {
|
||||
Z[i] = X[i] ^ Y[i]
|
||||
}
|
||||
}
|
||||
|
55
vendor/github.com/ProtonMail/go-crypto/ocb/ocb.go
generated
vendored
55
vendor/github.com/ProtonMail/go-crypto/ocb/ocb.go
generated
vendored
@ -109,8 +109,10 @@ func (o *ocb) Seal(dst, nonce, plaintext, adata []byte) []byte {
|
||||
if len(nonce) > o.nonceSize {
|
||||
panic("crypto/ocb: Incorrect nonce length given to OCB")
|
||||
}
|
||||
ret, out := byteutil.SliceForAppend(dst, len(plaintext)+o.tagSize)
|
||||
o.crypt(enc, out, nonce, adata, plaintext)
|
||||
sep := len(plaintext)
|
||||
ret, out := byteutil.SliceForAppend(dst, sep+o.tagSize)
|
||||
tag := o.crypt(enc, out[:sep], nonce, adata, plaintext)
|
||||
copy(out[sep:], tag)
|
||||
return ret
|
||||
}
|
||||
|
||||
@ -122,12 +124,10 @@ func (o *ocb) Open(dst, nonce, ciphertext, adata []byte) ([]byte, error) {
|
||||
return nil, ocbError("Ciphertext shorter than tag length")
|
||||
}
|
||||
sep := len(ciphertext) - o.tagSize
|
||||
ret, out := byteutil.SliceForAppend(dst, len(ciphertext))
|
||||
ret, out := byteutil.SliceForAppend(dst, sep)
|
||||
ciphertextData := ciphertext[:sep]
|
||||
tag := ciphertext[sep:]
|
||||
o.crypt(dec, out, nonce, adata, ciphertextData)
|
||||
if subtle.ConstantTimeCompare(ret[sep:], tag) == 1 {
|
||||
ret = ret[:sep]
|
||||
tag := o.crypt(dec, out, nonce, adata, ciphertextData)
|
||||
if subtle.ConstantTimeCompare(tag, ciphertext[sep:]) == 1 {
|
||||
return ret, nil
|
||||
}
|
||||
for i := range out {
|
||||
@ -137,7 +137,8 @@ func (o *ocb) Open(dst, nonce, ciphertext, adata []byte) ([]byte, error) {
|
||||
}
|
||||
|
||||
// On instruction enc (resp. dec), crypt is the encrypt (resp. decrypt)
|
||||
// function. It returns the resulting plain/ciphertext with the tag appended.
|
||||
// function. It writes the resulting plain/ciphertext into Y and returns
|
||||
// the tag.
|
||||
func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
|
||||
//
|
||||
// Consider X as a sequence of 128-bit blocks
|
||||
@ -194,13 +195,14 @@ func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
|
||||
byteutil.XorBytesMut(offset, o.mask.L[bits.TrailingZeros(uint(i+1))])
|
||||
blockX := X[i*blockSize : (i+1)*blockSize]
|
||||
blockY := Y[i*blockSize : (i+1)*blockSize]
|
||||
byteutil.XorBytes(blockY, blockX, offset)
|
||||
switch instruction {
|
||||
case enc:
|
||||
byteutil.XorBytesMut(checksum, blockX)
|
||||
byteutil.XorBytes(blockY, blockX, offset)
|
||||
o.block.Encrypt(blockY, blockY)
|
||||
byteutil.XorBytesMut(blockY, offset)
|
||||
byteutil.XorBytesMut(checksum, blockX)
|
||||
case dec:
|
||||
byteutil.XorBytes(blockY, blockX, offset)
|
||||
o.block.Decrypt(blockY, blockY)
|
||||
byteutil.XorBytesMut(blockY, offset)
|
||||
byteutil.XorBytesMut(checksum, blockY)
|
||||
@ -216,31 +218,24 @@ func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
|
||||
o.block.Encrypt(pad, offset)
|
||||
chunkX := X[blockSize*m:]
|
||||
chunkY := Y[blockSize*m : len(X)]
|
||||
byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
|
||||
// P_* || bit(1) || zeroes(127) - len(P_*)
|
||||
switch instruction {
|
||||
case enc:
|
||||
paddedY := append(chunkX, byte(128))
|
||||
paddedY = append(paddedY, make([]byte, blockSize-len(chunkX)-1)...)
|
||||
byteutil.XorBytesMut(checksum, paddedY)
|
||||
byteutil.XorBytesMut(checksum, chunkX)
|
||||
checksum[len(chunkX)] ^= 128
|
||||
byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
|
||||
// P_* || bit(1) || zeroes(127) - len(P_*)
|
||||
case dec:
|
||||
paddedX := append(chunkY, byte(128))
|
||||
paddedX = append(paddedX, make([]byte, blockSize-len(chunkY)-1)...)
|
||||
byteutil.XorBytesMut(checksum, paddedX)
|
||||
byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
|
||||
// P_* || bit(1) || zeroes(127) - len(P_*)
|
||||
byteutil.XorBytesMut(checksum, chunkY)
|
||||
checksum[len(chunkY)] ^= 128
|
||||
}
|
||||
byteutil.XorBytes(tag, checksum, offset)
|
||||
byteutil.XorBytesMut(tag, o.mask.lDol)
|
||||
o.block.Encrypt(tag, tag)
|
||||
byteutil.XorBytesMut(tag, o.hash(adata))
|
||||
copy(Y[blockSize*m+len(chunkY):], tag[:o.tagSize])
|
||||
} else {
|
||||
byteutil.XorBytes(tag, checksum, offset)
|
||||
byteutil.XorBytesMut(tag, o.mask.lDol)
|
||||
o.block.Encrypt(tag, tag)
|
||||
byteutil.XorBytesMut(tag, o.hash(adata))
|
||||
copy(Y[blockSize*m:], tag[:o.tagSize])
|
||||
}
|
||||
return Y
|
||||
byteutil.XorBytes(tag, checksum, offset)
|
||||
byteutil.XorBytesMut(tag, o.mask.lDol)
|
||||
o.block.Encrypt(tag, tag)
|
||||
byteutil.XorBytesMut(tag, o.hash(adata))
|
||||
return tag[:o.tagSize]
|
||||
}
|
||||
|
||||
// This hash function is used to compute the tag. Per design, on empty input it
|
||||
|
12
vendor/github.com/ProtonMail/go-crypto/openpgp/armor/encode.go
generated
vendored
12
vendor/github.com/ProtonMail/go-crypto/openpgp/armor/encode.go
generated
vendored
@ -7,6 +7,7 @@ package armor
|
||||
import (
|
||||
"encoding/base64"
|
||||
"io"
|
||||
"sort"
|
||||
)
|
||||
|
||||
var armorHeaderSep = []byte(": ")
|
||||
@ -159,8 +160,15 @@ func encode(out io.Writer, blockType string, headers map[string]string, checksum
|
||||
return
|
||||
}
|
||||
|
||||
for k, v := range headers {
|
||||
err = writeSlices(out, []byte(k), armorHeaderSep, []byte(v), newline)
|
||||
keys := make([]string, len(headers))
|
||||
i := 0
|
||||
for k := range headers {
|
||||
keys[i] = k
|
||||
i++
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
err = writeSlices(out, []byte(k), armorHeaderSep, []byte(headers[k]), newline)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
20
vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go
generated
vendored
20
vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go
generated
vendored
@ -6,6 +6,7 @@
|
||||
package errors // import "github.com/ProtonMail/go-crypto/openpgp/errors"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
@ -178,3 +179,22 @@ type ErrMalformedMessage string
|
||||
func (dke ErrMalformedMessage) Error() string {
|
||||
return "openpgp: malformed message " + string(dke)
|
||||
}
|
||||
|
||||
// ErrEncryptionKeySelection is returned if encryption key selection fails (v2 API).
|
||||
type ErrEncryptionKeySelection struct {
|
||||
PrimaryKeyId string
|
||||
PrimaryKeyErr error
|
||||
EncSelectionKeyId *string
|
||||
EncSelectionErr error
|
||||
}
|
||||
|
||||
func (eks ErrEncryptionKeySelection) Error() string {
|
||||
prefix := fmt.Sprintf("openpgp: key selection for primary key %s:", eks.PrimaryKeyId)
|
||||
if eks.PrimaryKeyErr != nil {
|
||||
return fmt.Sprintf("%s invalid primary key: %s", prefix, eks.PrimaryKeyErr)
|
||||
}
|
||||
if eks.EncSelectionKeyId != nil {
|
||||
return fmt.Sprintf("%s invalid encryption key %s: %s", prefix, *eks.EncSelectionKeyId, eks.EncSelectionErr)
|
||||
}
|
||||
return fmt.Sprintf("%s no encryption key: %s", prefix, eks.EncSelectionErr)
|
||||
}
|
||||
|
120
vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go
generated
vendored
120
vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go
generated
vendored
@ -3,7 +3,6 @@
|
||||
package packet
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/cipher"
|
||||
"encoding/binary"
|
||||
"io"
|
||||
@ -15,12 +14,11 @@ import (
|
||||
type aeadCrypter struct {
|
||||
aead cipher.AEAD
|
||||
chunkSize int
|
||||
initialNonce []byte
|
||||
nonce []byte
|
||||
associatedData []byte // Chunk-independent associated data
|
||||
chunkIndex []byte // Chunk counter
|
||||
packetTag packetType // SEIP packet (v2) or AEAD Encrypted Data packet
|
||||
bytesProcessed int // Amount of plaintext bytes encrypted/decrypted
|
||||
buffer bytes.Buffer // Buffered bytes across chunks
|
||||
}
|
||||
|
||||
// computeNonce takes the incremental index and computes an eXclusive OR with
|
||||
@ -28,12 +26,12 @@ type aeadCrypter struct {
|
||||
// 5.16.1 and 5.16.2). It returns the resulting nonce.
|
||||
func (wo *aeadCrypter) computeNextNonce() (nonce []byte) {
|
||||
if wo.packetTag == packetTypeSymmetricallyEncryptedIntegrityProtected {
|
||||
return append(wo.initialNonce, wo.chunkIndex...)
|
||||
return wo.nonce
|
||||
}
|
||||
|
||||
nonce = make([]byte, len(wo.initialNonce))
|
||||
copy(nonce, wo.initialNonce)
|
||||
offset := len(wo.initialNonce) - 8
|
||||
nonce = make([]byte, len(wo.nonce))
|
||||
copy(nonce, wo.nonce)
|
||||
offset := len(wo.nonce) - 8
|
||||
for i := 0; i < 8; i++ {
|
||||
nonce[i+offset] ^= wo.chunkIndex[i]
|
||||
}
|
||||
@ -62,8 +60,9 @@ func (wo *aeadCrypter) incrementIndex() error {
|
||||
type aeadDecrypter struct {
|
||||
aeadCrypter // Embedded ciphertext opener
|
||||
reader io.Reader // 'reader' is a partialLengthReader
|
||||
chunkBytes []byte
|
||||
peekedBytes []byte // Used to detect last chunk
|
||||
eof bool
|
||||
buffer []byte // Buffered decrypted bytes
|
||||
}
|
||||
|
||||
// Read decrypts bytes and reads them into dst. It decrypts when necessary and
|
||||
@ -71,59 +70,44 @@ type aeadDecrypter struct {
|
||||
// and an error.
|
||||
func (ar *aeadDecrypter) Read(dst []byte) (n int, err error) {
|
||||
// Return buffered plaintext bytes from previous calls
|
||||
if ar.buffer.Len() > 0 {
|
||||
return ar.buffer.Read(dst)
|
||||
}
|
||||
|
||||
// Return EOF if we've previously validated the final tag
|
||||
if ar.eof {
|
||||
return 0, io.EOF
|
||||
if len(ar.buffer) > 0 {
|
||||
n = copy(dst, ar.buffer)
|
||||
ar.buffer = ar.buffer[n:]
|
||||
return
|
||||
}
|
||||
|
||||
// Read a chunk
|
||||
tagLen := ar.aead.Overhead()
|
||||
cipherChunkBuf := new(bytes.Buffer)
|
||||
_, errRead := io.CopyN(cipherChunkBuf, ar.reader, int64(ar.chunkSize+tagLen))
|
||||
cipherChunk := cipherChunkBuf.Bytes()
|
||||
if errRead != nil && errRead != io.EOF {
|
||||
copy(ar.chunkBytes, ar.peekedBytes) // Copy bytes peeked in previous chunk or in initialization
|
||||
bytesRead, errRead := io.ReadFull(ar.reader, ar.chunkBytes[tagLen:])
|
||||
if errRead != nil && errRead != io.EOF && errRead != io.ErrUnexpectedEOF {
|
||||
return 0, errRead
|
||||
}
|
||||
|
||||
if len(cipherChunk) > 0 {
|
||||
decrypted, errChunk := ar.openChunk(cipherChunk)
|
||||
if bytesRead > 0 {
|
||||
ar.peekedBytes = ar.chunkBytes[bytesRead:bytesRead+tagLen]
|
||||
|
||||
decrypted, errChunk := ar.openChunk(ar.chunkBytes[:bytesRead])
|
||||
if errChunk != nil {
|
||||
return 0, errChunk
|
||||
}
|
||||
|
||||
// Return decrypted bytes, buffering if necessary
|
||||
if len(dst) < len(decrypted) {
|
||||
n = copy(dst, decrypted[:len(dst)])
|
||||
ar.buffer.Write(decrypted[len(dst):])
|
||||
} else {
|
||||
n = copy(dst, decrypted)
|
||||
}
|
||||
n = copy(dst, decrypted)
|
||||
ar.buffer = decrypted[n:]
|
||||
return
|
||||
}
|
||||
|
||||
// Check final authentication tag
|
||||
if errRead == io.EOF {
|
||||
errChunk := ar.validateFinalTag(ar.peekedBytes)
|
||||
if errChunk != nil {
|
||||
return n, errChunk
|
||||
}
|
||||
ar.eof = true // Mark EOF for when we've returned all buffered data
|
||||
}
|
||||
return
|
||||
return 0, io.EOF
|
||||
}
|
||||
|
||||
// Close is noOp. The final authentication tag of the stream was already
|
||||
// checked in the last Read call. In the future, this function could be used to
|
||||
// wipe the reader and peeked, decrypted bytes, if necessary.
|
||||
// Close checks the final authentication tag of the stream.
|
||||
// In the future, this function could also be used to wipe the reader
|
||||
// and peeked & decrypted bytes, if necessary.
|
||||
func (ar *aeadDecrypter) Close() (err error) {
|
||||
if !ar.eof {
|
||||
errChunk := ar.validateFinalTag(ar.peekedBytes)
|
||||
if errChunk != nil {
|
||||
return errChunk
|
||||
}
|
||||
errChunk := ar.validateFinalTag(ar.peekedBytes)
|
||||
if errChunk != nil {
|
||||
return errChunk
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@ -132,20 +116,13 @@ func (ar *aeadDecrypter) Close() (err error) {
|
||||
// the underlying plaintext and an error. It accesses peeked bytes from next
|
||||
// chunk, to identify the last chunk and decrypt/validate accordingly.
|
||||
func (ar *aeadDecrypter) openChunk(data []byte) ([]byte, error) {
|
||||
tagLen := ar.aead.Overhead()
|
||||
// Restore carried bytes from last call
|
||||
chunkExtra := append(ar.peekedBytes, data...)
|
||||
// 'chunk' contains encrypted bytes, followed by an authentication tag.
|
||||
chunk := chunkExtra[:len(chunkExtra)-tagLen]
|
||||
ar.peekedBytes = chunkExtra[len(chunkExtra)-tagLen:]
|
||||
|
||||
adata := ar.associatedData
|
||||
if ar.aeadCrypter.packetTag == packetTypeAEADEncrypted {
|
||||
adata = append(ar.associatedData, ar.chunkIndex...)
|
||||
}
|
||||
|
||||
nonce := ar.computeNextNonce()
|
||||
plainChunk, err := ar.aead.Open(nil, nonce, chunk, adata)
|
||||
plainChunk, err := ar.aead.Open(data[:0:len(data)], nonce, data, adata)
|
||||
if err != nil {
|
||||
return nil, errors.ErrAEADTagVerification
|
||||
}
|
||||
@ -183,27 +160,29 @@ func (ar *aeadDecrypter) validateFinalTag(tag []byte) error {
|
||||
type aeadEncrypter struct {
|
||||
aeadCrypter // Embedded plaintext sealer
|
||||
writer io.WriteCloser // 'writer' is a partialLengthWriter
|
||||
chunkBytes []byte
|
||||
offset int
|
||||
}
|
||||
|
||||
// Write encrypts and writes bytes. It encrypts when necessary and buffers extra
|
||||
// plaintext bytes for next call. When the stream is finished, Close() MUST be
|
||||
// called to append the final tag.
|
||||
func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) {
|
||||
// Append plaintextBytes to existing buffered bytes
|
||||
n, err = aw.buffer.Write(plaintextBytes)
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
// Encrypt and write chunks
|
||||
for aw.buffer.Len() >= aw.chunkSize {
|
||||
plainChunk := aw.buffer.Next(aw.chunkSize)
|
||||
encryptedChunk, err := aw.sealChunk(plainChunk)
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
_, err = aw.writer.Write(encryptedChunk)
|
||||
if err != nil {
|
||||
return n, err
|
||||
for n != len(plaintextBytes) {
|
||||
copied := copy(aw.chunkBytes[aw.offset:aw.chunkSize], plaintextBytes[n:])
|
||||
n += copied
|
||||
aw.offset += copied
|
||||
|
||||
if aw.offset == aw.chunkSize {
|
||||
encryptedChunk, err := aw.sealChunk(aw.chunkBytes[:aw.offset])
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
_, err = aw.writer.Write(encryptedChunk)
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
aw.offset = 0
|
||||
}
|
||||
}
|
||||
return
|
||||
@ -215,9 +194,8 @@ func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) {
|
||||
func (aw *aeadEncrypter) Close() (err error) {
|
||||
// Encrypt and write a chunk if there's buffered data left, or if we haven't
|
||||
// written any chunks yet.
|
||||
if aw.buffer.Len() > 0 || aw.bytesProcessed == 0 {
|
||||
plainChunk := aw.buffer.Bytes()
|
||||
lastEncryptedChunk, err := aw.sealChunk(plainChunk)
|
||||
if aw.offset > 0 || aw.bytesProcessed == 0 {
|
||||
lastEncryptedChunk, err := aw.sealChunk(aw.chunkBytes[:aw.offset])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -263,7 +241,7 @@ func (aw *aeadEncrypter) sealChunk(data []byte) ([]byte, error) {
|
||||
}
|
||||
|
||||
nonce := aw.computeNextNonce()
|
||||
encrypted := aw.aead.Seal(nil, nonce, data, adata)
|
||||
encrypted := aw.aead.Seal(data[:0], nonce, data, adata)
|
||||
aw.bytesProcessed += len(data)
|
||||
if err := aw.aeadCrypter.incrementIndex(); err != nil {
|
||||
return nil, err
|
||||
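The aead_crypter.go refactor above replaces the internal bytes.Buffer with a fixed `chunkBytes` scratch buffer and in-place `Seal`/`Open`, but the scheme itself is unchanged: the stream is cut into fixed-size chunks and each chunk is sealed with a nonce whose trailing 8 bytes carry the chunk counter. A minimal standalone sketch of that idea (not the vendored code: plain AES-GCM, no final tag, no associated data):

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"encoding/binary"
	"fmt"
)

// sealChunks splits plaintext into fixed-size chunks and seals each one with
// a nonce whose last 8 bytes are XOR-ed with the chunk counter. This mirrors
// the general shape of aeadEncrypter above, not its exact behaviour.
func sealChunks(aead cipher.AEAD, baseNonce, plaintext []byte, chunkSize int) [][]byte {
	nonce := make([]byte, len(baseNonce))
	var out [][]byte
	for i := uint64(0); len(plaintext) > 0; i++ {
		n := chunkSize
		if n > len(plaintext) {
			n = len(plaintext)
		}
		copy(nonce, baseNonce)
		ctr := binary.BigEndian.Uint64(nonce[len(nonce)-8:])
		binary.BigEndian.PutUint64(nonce[len(nonce)-8:], ctr^i)
		out = append(out, aead.Seal(nil, nonce, plaintext[:n], nil))
		plaintext = plaintext[n:]
	}
	return out
}

func main() {
	block, _ := aes.NewCipher(make([]byte, 32))
	aead, _ := cipher.NewGCM(block)
	chunks := sealChunks(aead, make([]byte, aead.NonceSize()), []byte("hello, chunked AEAD"), 8)
	fmt.Println(len(chunks), "chunks") // 3 chunks for 19 bytes at chunk size 8
}
```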
12 vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_encrypted.go (generated, vendored)
@ -65,24 +65,28 @@ func (ae *AEADEncrypted) decrypt(key []byte) (io.ReadCloser, error) {
	blockCipher := ae.cipher.new(key)
	aead := ae.mode.new(blockCipher)
	// Carry the first tagLen bytes
	chunkSize := decodeAEADChunkSize(ae.chunkSizeByte)
	tagLen := ae.mode.TagLength()
	peekedBytes := make([]byte, tagLen)
	chunkBytes := make([]byte, chunkSize+tagLen*2)
	peekedBytes := chunkBytes[chunkSize+tagLen:]
	n, err := io.ReadFull(ae.Contents, peekedBytes)
	if n < tagLen || (err != nil && err != io.EOF) {
		return nil, errors.AEADError("Not enough data to decrypt:" + err.Error())
	}
	chunkSize := decodeAEADChunkSize(ae.chunkSizeByte)

	return &aeadDecrypter{
		aeadCrypter: aeadCrypter{
			aead:           aead,
			chunkSize:      chunkSize,
			initialNonce:   ae.initialNonce,
			nonce:          ae.initialNonce,
			associatedData: ae.associatedData(),
			chunkIndex:     make([]byte, 8),
			packetTag:      packetTypeAEADEncrypted,
		},
		reader:      ae.Contents,
		peekedBytes: peekedBytes}, nil
		chunkBytes:  chunkBytes,
		peekedBytes: peekedBytes,
	}, nil
}

// associatedData for chunks: tag, version, cipher, mode, chunk size byte
12 vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config.go (generated, vendored)
@ -173,6 +173,11 @@ type Config struct {
	// weaknesses in the hash algo, potentially hindering e.g. some chosen-prefix attacks.
	// The default behavior, when the config or flag is nil, is to enable the feature.
	NonDeterministicSignaturesViaNotation *bool

	// InsecureAllowAllKeyFlagsWhenMissing determines how a key without valid key flags is handled.
	// When set to true, a key without flags is treated as if all flags are enabled.
	// This behavior is consistent with GPG.
	InsecureAllowAllKeyFlagsWhenMissing bool
}

func (c *Config) Random() io.Reader {
@ -403,6 +408,13 @@ func (c *Config) RandomizeSignaturesViaNotation() bool {
	return *c.NonDeterministicSignaturesViaNotation
}

func (c *Config) AllowAllKeyFlagsWhenMissing() bool {
	if c == nil {
		return false
	}
	return c.InsecureAllowAllKeyFlagsWhenMissing
}

// BoolPointer is a helper function to set a boolean pointer in the Config.
// e.g., config.CheckPacketSequence = BoolPointer(true)
func BoolPointer(value bool) *bool {
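For consumers of the library the new behaviour is opt-in. A hedged usage sketch, based only on the field and accessor added above:

```go
// Opt into GPG-compatible handling of keys that carry no key-flags
// subpacket (insecure, hence the field name).
cfg := &packet.Config{InsecureAllowAllKeyFlagsWhenMissing: true}
_ = cfg.AllowAllKeyFlagsWhenMissing() // true; a nil *Config still returns false
```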
7 vendor/github.com/ProtonMail/go-crypto/openpgp/packet/public_key.go (generated, vendored)
@ -1048,12 +1048,17 @@ func (pk *PublicKey) VerifyDirectKeySignature(sig *Signature) (err error) {
// KeyIdString returns the public key's fingerprint in capital hex
// (e.g. "6C7EE1B8621CC013").
func (pk *PublicKey) KeyIdString() string {
	return fmt.Sprintf("%X", pk.Fingerprint[12:20])
	return fmt.Sprintf("%016X", pk.KeyId)
}

// KeyIdShortString returns the short form of public key's fingerprint
// in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
// This function will return the full key id for v5 and v6 keys
// since the short key id is undefined for them.
func (pk *PublicKey) KeyIdShortString() string {
	if pk.Version >= 5 {
		return pk.KeyIdString()
	}
	return fmt.Sprintf("%X", pk.Fingerprint[16:20])
}
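The change above formats the key ID from `pk.KeyId` with `%016X` instead of slicing the fingerprint, which also zero-pads IDs whose leading byte is small. A rough illustration (for v4 keys the key ID is the low 64 bits of the fingerprint, and the short form is its low 32 bits):

```go
var keyID uint64 = 0x006ee1b8621cc013
fmt.Printf("%X\n", keyID)         // "6EE1B8621CC013" – 14 digits, drops the leading zero byte
fmt.Printf("%016X\n", keyID)      // "006EE1B8621CC013" – always 16 digits
fmt.Printf("%X\n", uint32(keyID)) // "621CC013" – short form for v4 keys
```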
4 vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go (generated, vendored)
@ -1288,7 +1288,9 @@ func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubp
	if sig.IssuerKeyId != nil && sig.Version == 4 {
		keyId := make([]byte, 8)
		binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId)
		subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, true, keyId})
		// Note: making this critical breaks RPM <=4.16.
		// See: https://github.com/ProtonMail/go-crypto/issues/263
		subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, false, keyId})
	}
	// Notation Data
	for _, notation := range sig.Notations {
27 vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go (generated, vendored)
@ -70,8 +70,10 @@ func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, e
|
||||
|
||||
aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData())
|
||||
// Carry the first tagLen bytes
|
||||
chunkSize := decodeAEADChunkSize(se.ChunkSizeByte)
|
||||
tagLen := se.Mode.TagLength()
|
||||
peekedBytes := make([]byte, tagLen)
|
||||
chunkBytes := make([]byte, chunkSize+tagLen*2)
|
||||
peekedBytes := chunkBytes[chunkSize+tagLen:]
|
||||
n, err := io.ReadFull(se.Contents, peekedBytes)
|
||||
if n < tagLen || (err != nil && err != io.EOF) {
|
||||
return nil, errors.StructuralError("not enough data to decrypt:" + err.Error())
|
||||
@ -81,12 +83,13 @@ func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, e
|
||||
aeadCrypter: aeadCrypter{
|
||||
aead: aead,
|
||||
chunkSize: decodeAEADChunkSize(se.ChunkSizeByte),
|
||||
initialNonce: nonce,
|
||||
nonce: nonce,
|
||||
associatedData: se.associatedData(),
|
||||
chunkIndex: make([]byte, 8),
|
||||
chunkIndex: nonce[len(nonce)-8:],
|
||||
packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected,
|
||||
},
|
||||
reader: se.Contents,
|
||||
chunkBytes: chunkBytes,
|
||||
peekedBytes: peekedBytes,
|
||||
}, nil
|
||||
}
|
||||
@ -130,16 +133,20 @@ func serializeSymmetricallyEncryptedAead(ciphertext io.WriteCloser, cipherSuite
|
||||
|
||||
aead, nonce := getSymmetricallyEncryptedAeadInstance(cipherSuite.Cipher, cipherSuite.Mode, inputKey, salt, prefix)
|
||||
|
||||
chunkSize := decodeAEADChunkSize(chunkSizeByte)
|
||||
tagLen := aead.Overhead()
|
||||
chunkBytes := make([]byte, chunkSize+tagLen)
|
||||
return &aeadEncrypter{
|
||||
aeadCrypter: aeadCrypter{
|
||||
aead: aead,
|
||||
chunkSize: decodeAEADChunkSize(chunkSizeByte),
|
||||
chunkSize: chunkSize,
|
||||
associatedData: prefix,
|
||||
chunkIndex: make([]byte, 8),
|
||||
initialNonce: nonce,
|
||||
nonce: nonce,
|
||||
chunkIndex: nonce[len(nonce)-8:],
|
||||
packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected,
|
||||
},
|
||||
writer: ciphertext,
|
||||
writer: ciphertext,
|
||||
chunkBytes: chunkBytes,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@ -149,10 +156,10 @@ func getSymmetricallyEncryptedAeadInstance(c CipherFunction, mode AEADMode, inpu
|
||||
encryptionKey := make([]byte, c.KeySize())
|
||||
_, _ = readFull(hkdfReader, encryptionKey)
|
||||
|
||||
// Last 64 bits of nonce are the counter
|
||||
nonce = make([]byte, mode.IvLength()-8)
|
||||
nonce = make([]byte, mode.IvLength())
|
||||
|
||||
_, _ = readFull(hkdfReader, nonce)
|
||||
// Last 64 bits of nonce are the counter
|
||||
_, _ = readFull(hkdfReader, nonce[:len(nonce)-8])
|
||||
|
||||
blockCipher := c.new(encryptionKey)
|
||||
aead = mode.new(blockCipher)
|
||||
|
@ -1,5 +1,6 @@
run:
  tests: false
  issues-exit-code: 0

issues:
  include:
@ -36,5 +37,4 @@ linters:
    - govet
    - ineffassign
    - staticcheck
    - typecheck
    - unused
28 vendor/github.com/charmbracelet/colorprofile/.golangci.yml (generated, vendored, new file)
@ -0,0 +1,28 @@
|
||||
run:
|
||||
tests: false
|
||||
|
||||
issues:
|
||||
include:
|
||||
- EXC0001
|
||||
- EXC0005
|
||||
- EXC0011
|
||||
- EXC0012
|
||||
- EXC0013
|
||||
|
||||
max-issues-per-linter: 0
|
||||
max-same-issues: 0
|
||||
|
||||
linters:
|
||||
enable:
|
||||
- bodyclose
|
||||
- gofumpt
|
||||
- goimports
|
||||
- gosec
|
||||
- nilerr
|
||||
- revive
|
||||
- rowserrcheck
|
||||
- sqlclosecheck
|
||||
- tparallel
|
||||
- unconvert
|
||||
- unparam
|
||||
- whitespace
|
6 vendor/github.com/charmbracelet/colorprofile/.goreleaser.yml (generated, vendored, new file)
@ -0,0 +1,6 @@
includes:
  - from_url:
      url: charmbracelet/meta/main/goreleaser-lib.yaml

# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json
21 vendor/github.com/charmbracelet/colorprofile/LICENSE (generated, vendored, new file)
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020-2024 Charmbracelet, Inc
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
103 vendor/github.com/charmbracelet/colorprofile/README.md (generated, vendored, new file)
@ -0,0 +1,103 @@
|
||||
# Colorprofile
|
||||
|
||||
<p>
|
||||
<a href="https://github.com/charmbracelet/colorprofile/releases"><img src="https://img.shields.io/github/release/charmbracelet/colorprofile.svg" alt="Latest Release"></a>
|
||||
<a href="https://pkg.go.dev/github.com/charmbracelet/colorprofile?tab=doc"><img src="https://godoc.org/github.com/charmbracelet/colorprofile?status.svg" alt="GoDoc"></a>
|
||||
<a href="https://github.com/charmbracelet/colorprofile/actions"><img src="https://github.com/charmbracelet/colorprofile/actions/workflows/build.yml/badge.svg" alt="Build Status"></a>
|
||||
</p>
|
||||
|
||||
A simple, powerful—and at times magical—package for detecting terminal color
|
||||
profiles and performing color (and CSI) degradation.
|
||||
|
||||
## Detecting the terminal’s color profile
|
||||
|
||||
Detecting the terminal’s color profile is easy.
|
||||
|
||||
```go
|
||||
import "github.com/charmbracelet/colorprofile"
|
||||
|
||||
// Detect the color profile. If you’re planning on writing to stderr you'd want
|
||||
// to use os.Stderr instead.
|
||||
p := colorprofile.Detect(os.Stdout, os.Environ())
|
||||
|
||||
// Comment on the profile.
|
||||
fmt.Printf("You know, your colors are quite %s.", func() string {
|
||||
switch p {
|
||||
case colorprofile.TrueColor:
|
||||
return "fancy"
|
||||
case colorprofile.ANSI256:
|
||||
return "1990s fancy"
|
||||
case colorprofile.ANSI:
|
||||
return "normcore"
|
||||
case colorprofile.Ascii:
|
||||
return "ancient"
|
||||
case colorprofile.NoTTY:
|
||||
return "naughty!"
|
||||
}
|
||||
return "...IDK" // this should never happen
|
||||
}())
|
||||
```
|
||||
|
||||
## Downsampling colors
|
||||
|
||||
When necessary, colors can be downsampled to a given profile, or manually
|
||||
downsampled to a specific profile.
|
||||
|
||||
```go
|
||||
p := colorprofile.Detect(os.Stdout, os.Environ())
|
||||
c := color.RGBA{0x6b, 0x50, 0xff, 0xff} // #6b50ff
|
||||
|
||||
// Downsample to the detected profile, when necessary.
|
||||
convertedColor := p.Convert(c)
|
||||
|
||||
// Or manually convert to a given profile.
|
||||
ansi256Color := colorprofile.ANSI256.Convert(c)
|
||||
ansiColor := colorprofile.ANSI.Convert(c)
|
||||
noColor := colorprofile.Ascii.Convert(c)
|
||||
noANSI := colorprofile.NoTTY.Convert(c)
|
||||
```
|
||||
|
||||
## Automatic downsampling with a Writer
|
||||
|
||||
You can also magically downsample colors in ANSI output, when necessary. If
|
||||
output is not a TTY ANSI will be dropped entirely.
|
||||
|
||||
```go
|
||||
myFancyANSI := "\x1b[38;2;107;80;255mCute \x1b[1;3mpuppy!!\x1b[m"
|
||||
|
||||
// Automatically downsample for the terminal at stdout.
|
||||
w := colorprofile.NewWriter(os.Stdout, os.Environ())
|
||||
fmt.Fprintf(w, myFancyANSI)
|
||||
|
||||
// Downsample to 4-bit ANSI.
|
||||
w.Profile = colorprofile.ANSI
|
||||
fmt.Fprintf(w, myFancyANSI)
|
||||
|
||||
// Ascii-fy, no colors.
|
||||
w.Profile = colorprofile.Ascii
|
||||
fmt.Fprintf(w, myFancyANSI)
|
||||
|
||||
// Strip ANSI altogether.
|
||||
w.Profile = colorprofile.NoTTY
|
||||
fmt.Fprintf(w, myFancyANSI) // not as fancy
|
||||
```
|
||||
|
||||
## Feedback
|
||||
|
||||
We’d love to hear your thoughts on this project. Feel free to drop us a note!
|
||||
|
||||
- [Twitter](https://twitter.com/charmcli)
|
||||
- [The Fediverse](https://mastodon.social/@charmcli)
|
||||
- [Discord](https://charm.sh/chat)
|
||||
|
||||
## License
|
||||
|
||||
[MIT](https://github.com/charmbracelet/bubbletea/raw/master/LICENSE)
|
||||
|
||||
---
|
||||
|
||||
Part of [Charm](https://charm.sh).
|
||||
|
||||
<a href="https://charm.sh/"><img alt="The Charm logo" src="https://stuff.charm.sh/charm-badge.jpg" width="400"></a>
|
||||
|
||||
Charm热爱开源 • Charm loves open source • نحنُ نحب المصادر المفتوحة
|
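Taken together, the README examples above boil down to a small pattern: detect the profile once, then convert individual colors or wrap the output writer. A hedged end-to-end sketch using only the API shown in this vendored package:

```go
package main

import (
	"fmt"
	"image/color"
	"os"

	"github.com/charmbracelet/colorprofile"
)

func main() {
	// Detect the profile for stdout once.
	p := colorprofile.Detect(os.Stdout, os.Environ())
	fmt.Println("profile:", p)

	// Downsample a single color to whatever the terminal supports.
	c := color.RGBA{R: 0x6b, G: 0x50, B: 0xff, A: 0xff}
	fmt.Println("converted:", p.Convert(c))

	// Or let the writer downsample whole ANSI streams on the fly.
	w := colorprofile.NewWriter(os.Stdout, os.Environ())
	fmt.Fprintln(w, "\x1b[38;2;107;80;255mhello\x1b[m")
}
```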
287 vendor/github.com/charmbracelet/colorprofile/env.go (generated, vendored, new file)
@ -0,0 +1,287 @@
|
||||
package colorprofile
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/charmbracelet/x/term"
|
||||
"github.com/xo/terminfo"
|
||||
)
|
||||
|
||||
// Detect returns the color profile based on the terminal output, and
|
||||
// environment variables. This respects NO_COLOR, CLICOLOR, and CLICOLOR_FORCE
|
||||
// environment variables.
|
||||
//
|
||||
// The rules as follows:
|
||||
// - TERM=dumb is always treated as NoTTY unless CLICOLOR_FORCE=1 is set.
|
||||
// - If COLORTERM=truecolor, and the profile is not NoTTY, it gest upgraded to TrueColor.
|
||||
// - Using any 256 color terminal (e.g. TERM=xterm-256color) will set the profile to ANSI256.
|
||||
// - Using any color terminal (e.g. TERM=xterm-color) will set the profile to ANSI.
|
||||
// - Using CLICOLOR=1 without TERM defined should be treated as ANSI if the
|
||||
// output is a terminal.
|
||||
// - NO_COLOR takes precedence over CLICOLOR/CLICOLOR_FORCE, and will disable
|
||||
// colors but not text decoration, i.e. bold, italic, faint, etc.
|
||||
//
|
||||
// See https://no-color.org/ and https://bixense.com/clicolors/ for more information.
|
||||
func Detect(output io.Writer, env []string) Profile {
|
||||
out, ok := output.(term.File)
|
||||
isatty := ok && term.IsTerminal(out.Fd())
|
||||
environ := newEnviron(env)
|
||||
term := environ.get("TERM")
|
||||
isDumb := term == "dumb"
|
||||
envp := colorProfile(isatty, environ)
|
||||
if envp == TrueColor || envNoColor(environ) {
|
||||
// We already know we have TrueColor, or NO_COLOR is set.
|
||||
return envp
|
||||
}
|
||||
|
||||
if isatty && !isDumb {
|
||||
tip := Terminfo(term)
|
||||
tmuxp := tmux(environ)
|
||||
|
||||
// Color profile is the maximum of env, terminfo, and tmux.
|
||||
return max(envp, max(tip, tmuxp))
|
||||
}
|
||||
|
||||
return envp
|
||||
}
|
||||
|
||||
// Env returns the color profile based on the terminal environment variables.
|
||||
// This respects NO_COLOR, CLICOLOR, and CLICOLOR_FORCE environment variables.
|
||||
//
|
||||
// The rules as follows:
|
||||
// - TERM=dumb is always treated as NoTTY unless CLICOLOR_FORCE=1 is set.
|
||||
// - If COLORTERM=truecolor, and the profile is not NoTTY, it gest upgraded to TrueColor.
|
||||
// - Using any 256 color terminal (e.g. TERM=xterm-256color) will set the profile to ANSI256.
|
||||
// - Using any color terminal (e.g. TERM=xterm-color) will set the profile to ANSI.
|
||||
// - Using CLICOLOR=1 without TERM defined should be treated as ANSI if the
|
||||
// output is a terminal.
|
||||
// - NO_COLOR takes precedence over CLICOLOR/CLICOLOR_FORCE, and will disable
|
||||
// colors but not text decoration, i.e. bold, italic, faint, etc.
|
||||
//
|
||||
// See https://no-color.org/ and https://bixense.com/clicolors/ for more information.
|
||||
func Env(env []string) (p Profile) {
|
||||
return colorProfile(true, newEnviron(env))
|
||||
}
|
||||
|
||||
func colorProfile(isatty bool, env environ) (p Profile) {
|
||||
isDumb := env.get("TERM") == "dumb"
|
||||
envp := envColorProfile(env)
|
||||
if !isatty || isDumb {
|
||||
// Check if the output is a terminal.
|
||||
// Treat dumb terminals as NoTTY
|
||||
p = NoTTY
|
||||
} else {
|
||||
p = envp
|
||||
}
|
||||
|
||||
if envNoColor(env) && isatty {
|
||||
if p > Ascii {
|
||||
p = Ascii
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if cliColorForced(env) {
|
||||
if p < ANSI {
|
||||
p = ANSI
|
||||
}
|
||||
if envp > p {
|
||||
p = envp
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if cliColor(env) {
|
||||
if isatty && !isDumb && p < ANSI {
|
||||
p = ANSI
|
||||
}
|
||||
}
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// envNoColor returns true if the environment variables explicitly disable color output
|
||||
// by setting NO_COLOR (https://no-color.org/).
|
||||
func envNoColor(env environ) bool {
|
||||
noColor, _ := strconv.ParseBool(env.get("NO_COLOR"))
|
||||
return noColor
|
||||
}
|
||||
|
||||
func cliColor(env environ) bool {
|
||||
cliColor, _ := strconv.ParseBool(env.get("CLICOLOR"))
|
||||
return cliColor
|
||||
}
|
||||
|
||||
func cliColorForced(env environ) bool {
|
||||
cliColorForce, _ := strconv.ParseBool(env.get("CLICOLOR_FORCE"))
|
||||
return cliColorForce
|
||||
}
|
||||
|
||||
func colorTerm(env environ) bool {
|
||||
colorTerm := strings.ToLower(env.get("COLORTERM"))
|
||||
return colorTerm == "truecolor" || colorTerm == "24bit" ||
|
||||
colorTerm == "yes" || colorTerm == "true"
|
||||
}
|
||||
|
||||
// envColorProfile returns infers the color profile from the environment.
|
||||
func envColorProfile(env environ) (p Profile) {
|
||||
term, ok := env.lookup("TERM")
|
||||
if !ok || len(term) == 0 || term == "dumb" {
|
||||
p = NoTTY
|
||||
if runtime.GOOS == "windows" {
|
||||
// Use Windows API to detect color profile. Windows Terminal and
|
||||
// cmd.exe don't define $TERM.
|
||||
if wcp, ok := windowsColorProfile(env); ok {
|
||||
p = wcp
|
||||
}
|
||||
}
|
||||
} else {
|
||||
p = ANSI
|
||||
}
|
||||
|
||||
parts := strings.Split(term, "-")
|
||||
switch parts[0] {
|
||||
case "alacritty",
|
||||
"contour",
|
||||
"foot",
|
||||
"ghostty",
|
||||
"kitty",
|
||||
"rio",
|
||||
"st",
|
||||
"wezterm":
|
||||
return TrueColor
|
||||
case "xterm":
|
||||
if len(parts) > 1 {
|
||||
switch parts[1] {
|
||||
case "ghostty", "kitty":
|
||||
// These terminals can be defined as xterm-TERMNAME
|
||||
return TrueColor
|
||||
}
|
||||
}
|
||||
case "tmux", "screen":
|
||||
if p < ANSI256 {
|
||||
p = ANSI256
|
||||
}
|
||||
}
|
||||
|
||||
if isCloudShell, _ := strconv.ParseBool(env.get("GOOGLE_CLOUD_SHELL")); isCloudShell {
|
||||
return TrueColor
|
||||
}
|
||||
|
||||
// GNU Screen doesn't support TrueColor
|
||||
// Tmux doesn't support $COLORTERM
|
||||
if colorTerm(env) && !strings.HasPrefix(term, "screen") && !strings.HasPrefix(term, "tmux") {
|
||||
return TrueColor
|
||||
}
|
||||
|
||||
if strings.HasSuffix(term, "256color") && p < ANSI256 {
|
||||
p = ANSI256
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Terminfo returns the color profile based on the terminal's terminfo
|
||||
// database. This relies on the Tc and RGB capabilities to determine if the
|
||||
// terminal supports TrueColor.
|
||||
// If term is empty or "dumb", it returns NoTTY.
|
||||
func Terminfo(term string) (p Profile) {
|
||||
if len(term) == 0 || term == "dumb" {
|
||||
return NoTTY
|
||||
}
|
||||
|
||||
p = ANSI
|
||||
ti, err := terminfo.Load(term)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
extbools := ti.ExtBoolCapsShort()
|
||||
if _, ok := extbools["Tc"]; ok {
|
||||
return TrueColor
|
||||
}
|
||||
|
||||
if _, ok := extbools["RGB"]; ok {
|
||||
return TrueColor
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Tmux returns the color profile based on `tmux info` output. Tmux supports
|
||||
// overriding the terminal's color capabilities, so this function will return
|
||||
// the color profile based on the tmux configuration.
|
||||
func Tmux(env []string) Profile {
|
||||
return tmux(newEnviron(env))
|
||||
}
|
||||
|
||||
// tmux returns the color profile based on the tmux environment variables.
|
||||
func tmux(env environ) (p Profile) {
|
||||
if tmux, ok := env.lookup("TMUX"); !ok || len(tmux) == 0 {
|
||||
// Not in tmux
|
||||
return NoTTY
|
||||
}
|
||||
|
||||
// Check if tmux has either Tc or RGB capabilities. Otherwise, return
|
||||
// ANSI256.
|
||||
p = ANSI256
|
||||
cmd := exec.Command("tmux", "info")
|
||||
out, err := cmd.Output()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, line := range bytes.Split(out, []byte("\n")) {
|
||||
if (bytes.Contains(line, []byte("Tc")) || bytes.Contains(line, []byte("RGB"))) &&
|
||||
bytes.Contains(line, []byte("true")) {
|
||||
return TrueColor
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// environ is a map of environment variables.
|
||||
type environ map[string]string
|
||||
|
||||
// newEnviron returns a new environment map from a slice of environment
|
||||
// variables.
|
||||
func newEnviron(environ []string) environ {
|
||||
m := make(map[string]string, len(environ))
|
||||
for _, e := range environ {
|
||||
parts := strings.SplitN(e, "=", 2)
|
||||
var value string
|
||||
if len(parts) == 2 {
|
||||
value = parts[1]
|
||||
}
|
||||
m[parts[0]] = value
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// lookup returns the value of an environment variable and a boolean indicating
|
||||
// if it exists.
|
||||
func (e environ) lookup(key string) (string, bool) {
|
||||
v, ok := e[key]
|
||||
return v, ok
|
||||
}
|
||||
|
||||
// get returns the value of an environment variable and empty string if it
|
||||
// doesn't exist.
|
||||
func (e environ) get(key string) string {
|
||||
v, _ := e.lookup(key)
|
||||
return v
|
||||
}
|
||||
|
||||
func max[T ~byte | ~int](a, b T) T {
|
||||
if a > b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}
|
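The precedence rules in the `Detect`/`Env` doc comments above can be hard to keep straight; this sketch (assuming the exported `Env` function from this file) shows how a few common environments resolve:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/colorprofile"
)

func main() {
	fmt.Println(colorprofile.Env([]string{"TERM=xterm-256color"}))                        // ANSI256
	fmt.Println(colorprofile.Env([]string{"TERM=xterm-256color", "COLORTERM=truecolor"})) // TrueColor
	fmt.Println(colorprofile.Env([]string{"TERM=xterm-256color", "NO_COLOR=1"}))          // Ascii: colors off, text attributes kept
}
```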
8 vendor/github.com/charmbracelet/colorprofile/env_other.go (generated, vendored, new file)
@ -0,0 +1,8 @@
//go:build !windows
// +build !windows

package colorprofile

func windowsColorProfile(map[string]string) (Profile, bool) {
	return 0, false
}
45 vendor/github.com/charmbracelet/colorprofile/env_windows.go (generated, vendored, new file)
@ -0,0 +1,45 @@
|
||||
//go:build windows
|
||||
// +build windows
|
||||
|
||||
package colorprofile
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
func windowsColorProfile(env map[string]string) (Profile, bool) {
|
||||
if env["ConEmuANSI"] == "ON" {
|
||||
return TrueColor, true
|
||||
}
|
||||
|
||||
if len(env["WT_SESSION"]) > 0 {
|
||||
// Windows Terminal supports TrueColor
|
||||
return TrueColor, true
|
||||
}
|
||||
|
||||
major, _, build := windows.RtlGetNtVersionNumbers()
|
||||
if build < 10586 || major < 10 {
|
||||
// No ANSI support before WindowsNT 10 build 10586
|
||||
if len(env["ANSICON"]) > 0 {
|
||||
ansiconVer := env["ANSICON_VER"]
|
||||
cv, err := strconv.Atoi(ansiconVer)
|
||||
if err != nil || cv < 181 {
|
||||
// No 8 bit color support before ANSICON 1.81
|
||||
return ANSI, true
|
||||
}
|
||||
|
||||
return ANSI256, true
|
||||
}
|
||||
|
||||
return NoTTY, true
|
||||
}
|
||||
|
||||
if build < 14931 {
|
||||
// No true color support before build 14931
|
||||
return ANSI256, true
|
||||
}
|
||||
|
||||
return TrueColor, true
|
||||
}
|
399 vendor/github.com/charmbracelet/colorprofile/profile.go (generated, vendored, new file)
@ -0,0 +1,399 @@
|
||||
package colorprofile
|
||||
|
||||
import (
|
||||
"image/color"
|
||||
"math"
|
||||
|
||||
"github.com/charmbracelet/x/ansi"
|
||||
"github.com/lucasb-eyer/go-colorful"
|
||||
)
|
||||
|
||||
// Profile is a color profile: NoTTY, Ascii, ANSI, ANSI256, or TrueColor.
|
||||
type Profile byte
|
||||
|
||||
const (
|
||||
// NoTTY, not a terminal profile.
|
||||
NoTTY Profile = iota
|
||||
// Ascii, uncolored profile.
|
||||
Ascii //nolint:revive
|
||||
// ANSI, 4-bit color profile.
|
||||
ANSI
|
||||
// ANSI256, 8-bit color profile.
|
||||
ANSI256
|
||||
// TrueColor, 24-bit color profile.
|
||||
TrueColor
|
||||
)
|
||||
|
||||
// String returns the string representation of a Profile.
|
||||
func (p Profile) String() string {
|
||||
switch p {
|
||||
case TrueColor:
|
||||
return "TrueColor"
|
||||
case ANSI256:
|
||||
return "ANSI256"
|
||||
case ANSI:
|
||||
return "ANSI"
|
||||
case Ascii:
|
||||
return "Ascii"
|
||||
case NoTTY:
|
||||
return "NoTTY"
|
||||
}
|
||||
return "Unknown"
|
||||
}
|
||||
|
||||
// Convert transforms a given Color to a Color supported within the Profile.
|
||||
func (p Profile) Convert(c color.Color) color.Color {
|
||||
if p <= Ascii {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch c := c.(type) {
|
||||
case ansi.BasicColor:
|
||||
return c
|
||||
|
||||
case ansi.ExtendedColor:
|
||||
if p == ANSI {
|
||||
return ansi256ToANSIColor(c)
|
||||
}
|
||||
return c
|
||||
|
||||
case ansi.TrueColor, color.Color:
|
||||
h, ok := colorful.MakeColor(c)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if p != TrueColor {
|
||||
ac := hexToANSI256Color(h)
|
||||
if p == ANSI {
|
||||
return ansi256ToANSIColor(ac)
|
||||
}
|
||||
return ac
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
func hexToANSI256Color(c colorful.Color) ansi.ExtendedColor {
|
||||
v2ci := func(v float64) int {
|
||||
if v < 48 {
|
||||
return 0
|
||||
}
|
||||
if v < 115 {
|
||||
return 1
|
||||
}
|
||||
return int((v - 35) / 40)
|
||||
}
|
||||
|
||||
// Calculate the nearest 0-based color index at 16..231
|
||||
r := v2ci(c.R * 255.0) // 0..5 each
|
||||
g := v2ci(c.G * 255.0)
|
||||
b := v2ci(c.B * 255.0)
|
||||
ci := 36*r + 6*g + b /* 0..215 */
|
||||
|
||||
// Calculate the represented colors back from the index
|
||||
i2cv := [6]int{0, 0x5f, 0x87, 0xaf, 0xd7, 0xff}
|
||||
cr := i2cv[r] // r/g/b, 0..255 each
|
||||
cg := i2cv[g]
|
||||
cb := i2cv[b]
|
||||
|
||||
// Calculate the nearest 0-based gray index at 232..255
|
||||
var grayIdx int
|
||||
average := (cr + cg + cb) / 3
|
||||
if average > 238 {
|
||||
grayIdx = 23
|
||||
} else {
|
||||
grayIdx = (average - 3) / 10 // 0..23
|
||||
}
|
||||
gv := 8 + 10*grayIdx // same value for r/g/b, 0..255
|
||||
|
||||
// Return the one which is nearer to the original input rgb value
|
||||
c2 := colorful.Color{R: float64(cr) / 255.0, G: float64(cg) / 255.0, B: float64(cb) / 255.0}
|
||||
g2 := colorful.Color{R: float64(gv) / 255.0, G: float64(gv) / 255.0, B: float64(gv) / 255.0}
|
||||
colorDist := c.DistanceHSLuv(c2)
|
||||
grayDist := c.DistanceHSLuv(g2)
|
||||
|
||||
if colorDist <= grayDist {
|
||||
return ansi.ExtendedColor(16 + ci) //nolint:gosec
|
||||
}
|
||||
return ansi.ExtendedColor(232 + grayIdx) //nolint:gosec
|
||||
}
|
||||
|
||||
func ansi256ToANSIColor(c ansi.ExtendedColor) ansi.BasicColor {
|
||||
var r int
|
||||
md := math.MaxFloat64
|
||||
|
||||
h, _ := colorful.Hex(ansiHex[c])
|
||||
for i := 0; i <= 15; i++ {
|
||||
hb, _ := colorful.Hex(ansiHex[i])
|
||||
d := h.DistanceHSLuv(hb)
|
||||
|
||||
if d < md {
|
||||
md = d
|
||||
r = i
|
||||
}
|
||||
}
|
||||
|
||||
return ansi.BasicColor(r) //nolint:gosec
|
||||
}
|
||||
|
||||
// RGB values of ANSI colors (0-255).
|
||||
var ansiHex = []string{
|
||||
"#000000",
|
||||
"#800000",
|
||||
"#008000",
|
||||
"#808000",
|
||||
"#000080",
|
||||
"#800080",
|
||||
"#008080",
|
||||
"#c0c0c0",
|
||||
"#808080",
|
||||
"#ff0000",
|
||||
"#00ff00",
|
||||
"#ffff00",
|
||||
"#0000ff",
|
||||
"#ff00ff",
|
||||
"#00ffff",
|
||||
"#ffffff",
|
||||
"#000000",
|
||||
"#00005f",
|
||||
"#000087",
|
||||
"#0000af",
|
||||
"#0000d7",
|
||||
"#0000ff",
|
||||
"#005f00",
|
||||
"#005f5f",
|
||||
"#005f87",
|
||||
"#005faf",
|
||||
"#005fd7",
|
||||
"#005fff",
|
||||
"#008700",
|
||||
"#00875f",
|
||||
"#008787",
|
||||
"#0087af",
|
||||
"#0087d7",
|
||||
"#0087ff",
|
||||
"#00af00",
|
||||
"#00af5f",
|
||||
"#00af87",
|
||||
"#00afaf",
|
||||
"#00afd7",
|
||||
"#00afff",
|
||||
"#00d700",
|
||||
"#00d75f",
|
||||
"#00d787",
|
||||
"#00d7af",
|
||||
"#00d7d7",
|
||||
"#00d7ff",
|
||||
"#00ff00",
|
||||
"#00ff5f",
|
||||
"#00ff87",
|
||||
"#00ffaf",
|
||||
"#00ffd7",
|
||||
"#00ffff",
|
||||
"#5f0000",
|
||||
"#5f005f",
|
||||
"#5f0087",
|
||||
"#5f00af",
|
||||
"#5f00d7",
|
||||
"#5f00ff",
|
||||
"#5f5f00",
|
||||
"#5f5f5f",
|
||||
"#5f5f87",
|
||||
"#5f5faf",
|
||||
"#5f5fd7",
|
||||
"#5f5fff",
|
||||
"#5f8700",
|
||||
"#5f875f",
|
||||
"#5f8787",
|
||||
"#5f87af",
|
||||
"#5f87d7",
|
||||
"#5f87ff",
|
||||
"#5faf00",
|
||||
"#5faf5f",
|
||||
"#5faf87",
|
||||
"#5fafaf",
|
||||
"#5fafd7",
|
||||
"#5fafff",
|
||||
"#5fd700",
|
||||
"#5fd75f",
|
||||
"#5fd787",
|
||||
"#5fd7af",
|
||||
"#5fd7d7",
|
||||
"#5fd7ff",
|
||||
"#5fff00",
|
||||
"#5fff5f",
|
||||
"#5fff87",
|
||||
"#5fffaf",
|
||||
"#5fffd7",
|
||||
"#5fffff",
|
||||
"#870000",
|
||||
"#87005f",
|
||||
"#870087",
|
||||
"#8700af",
|
||||
"#8700d7",
|
||||
"#8700ff",
|
||||
"#875f00",
|
||||
"#875f5f",
|
||||
"#875f87",
|
||||
"#875faf",
|
||||
"#875fd7",
|
||||
"#875fff",
|
||||
"#878700",
|
||||
"#87875f",
|
||||
"#878787",
|
||||
"#8787af",
|
||||
"#8787d7",
|
||||
"#8787ff",
|
||||
"#87af00",
|
||||
"#87af5f",
|
||||
"#87af87",
|
||||
"#87afaf",
|
||||
"#87afd7",
|
||||
"#87afff",
|
||||
"#87d700",
|
||||
"#87d75f",
|
||||
"#87d787",
|
||||
"#87d7af",
|
||||
"#87d7d7",
|
||||
"#87d7ff",
|
||||
"#87ff00",
|
||||
"#87ff5f",
|
||||
"#87ff87",
|
||||
"#87ffaf",
|
||||
"#87ffd7",
|
||||
"#87ffff",
|
||||
"#af0000",
|
||||
"#af005f",
|
||||
"#af0087",
|
||||
"#af00af",
|
||||
"#af00d7",
|
||||
"#af00ff",
|
||||
"#af5f00",
|
||||
"#af5f5f",
|
||||
"#af5f87",
|
||||
"#af5faf",
|
||||
"#af5fd7",
|
||||
"#af5fff",
|
||||
"#af8700",
|
||||
"#af875f",
|
||||
"#af8787",
|
||||
"#af87af",
|
||||
"#af87d7",
|
||||
"#af87ff",
|
||||
"#afaf00",
|
||||
"#afaf5f",
|
||||
"#afaf87",
|
||||
"#afafaf",
|
||||
"#afafd7",
|
||||
"#afafff",
|
||||
"#afd700",
|
||||
"#afd75f",
|
||||
"#afd787",
|
||||
"#afd7af",
|
||||
"#afd7d7",
|
||||
"#afd7ff",
|
||||
"#afff00",
|
||||
"#afff5f",
|
||||
"#afff87",
|
||||
"#afffaf",
|
||||
"#afffd7",
|
||||
"#afffff",
|
||||
"#d70000",
|
||||
"#d7005f",
|
||||
"#d70087",
|
||||
"#d700af",
|
||||
"#d700d7",
|
||||
"#d700ff",
|
||||
"#d75f00",
|
||||
"#d75f5f",
|
||||
"#d75f87",
|
||||
"#d75faf",
|
||||
"#d75fd7",
|
||||
"#d75fff",
|
||||
"#d78700",
|
||||
"#d7875f",
|
||||
"#d78787",
|
||||
"#d787af",
|
||||
"#d787d7",
|
||||
"#d787ff",
|
||||
"#d7af00",
|
||||
"#d7af5f",
|
||||
"#d7af87",
|
||||
"#d7afaf",
|
||||
"#d7afd7",
|
||||
"#d7afff",
|
||||
"#d7d700",
|
||||
"#d7d75f",
|
||||
"#d7d787",
|
||||
"#d7d7af",
|
||||
"#d7d7d7",
|
||||
"#d7d7ff",
|
||||
"#d7ff00",
|
||||
"#d7ff5f",
|
||||
"#d7ff87",
|
||||
"#d7ffaf",
|
||||
"#d7ffd7",
|
||||
"#d7ffff",
|
||||
"#ff0000",
|
||||
"#ff005f",
|
||||
"#ff0087",
|
||||
"#ff00af",
|
||||
"#ff00d7",
|
||||
"#ff00ff",
|
||||
"#ff5f00",
|
||||
"#ff5f5f",
|
||||
"#ff5f87",
|
||||
"#ff5faf",
|
||||
"#ff5fd7",
|
||||
"#ff5fff",
|
||||
"#ff8700",
|
||||
"#ff875f",
|
||||
"#ff8787",
|
||||
"#ff87af",
|
||||
"#ff87d7",
|
||||
"#ff87ff",
|
||||
"#ffaf00",
|
||||
"#ffaf5f",
|
||||
"#ffaf87",
|
||||
"#ffafaf",
|
||||
"#ffafd7",
|
||||
"#ffafff",
|
||||
"#ffd700",
|
||||
"#ffd75f",
|
||||
"#ffd787",
|
||||
"#ffd7af",
|
||||
"#ffd7d7",
|
||||
"#ffd7ff",
|
||||
"#ffff00",
|
||||
"#ffff5f",
|
||||
"#ffff87",
|
||||
"#ffffaf",
|
||||
"#ffffd7",
|
||||
"#ffffff",
|
||||
"#080808",
|
||||
"#121212",
|
||||
"#1c1c1c",
|
||||
"#262626",
|
||||
"#303030",
|
||||
"#3a3a3a",
|
||||
"#444444",
|
||||
"#4e4e4e",
|
||||
"#585858",
|
||||
"#626262",
|
||||
"#6c6c6c",
|
||||
"#767676",
|
||||
"#808080",
|
||||
"#8a8a8a",
|
||||
"#949494",
|
||||
"#9e9e9e",
|
||||
"#a8a8a8",
|
||||
"#b2b2b2",
|
||||
"#bcbcbc",
|
||||
"#c6c6c6",
|
||||
"#d0d0d0",
|
||||
"#dadada",
|
||||
"#e4e4e4",
|
||||
"#eeeeee",
|
||||
}
|
166 vendor/github.com/charmbracelet/colorprofile/writer.go (generated, vendored, new file)
@ -0,0 +1,166 @@
|
||||
package colorprofile
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image/color"
|
||||
"io"
|
||||
"strconv"
|
||||
|
||||
"github.com/charmbracelet/x/ansi"
|
||||
)
|
||||
|
||||
// NewWriter creates a new color profile writer that downgrades color sequences
|
||||
// based on the detected color profile.
|
||||
//
|
||||
// If environ is nil, it will use os.Environ() to get the environment variables.
|
||||
//
|
||||
// It queries the given writer to determine if it supports ANSI escape codes.
|
||||
// If it does, along with the given environment variables, it will determine
|
||||
// the appropriate color profile to use for color formatting.
|
||||
//
|
||||
// This respects the NO_COLOR, CLICOLOR, and CLICOLOR_FORCE environment variables.
|
||||
func NewWriter(w io.Writer, environ []string) *Writer {
|
||||
return &Writer{
|
||||
Forward: w,
|
||||
Profile: Detect(w, environ),
|
||||
}
|
||||
}
|
||||
|
||||
// Writer represents a color profile writer that writes ANSI sequences to the
|
||||
// underlying writer.
|
||||
type Writer struct {
|
||||
Forward io.Writer
|
||||
Profile Profile
|
||||
}
|
||||
|
||||
// Write writes the given text to the underlying writer.
|
||||
func (w *Writer) Write(p []byte) (int, error) {
|
||||
switch w.Profile {
|
||||
case TrueColor:
|
||||
return w.Forward.Write(p)
|
||||
case NoTTY:
|
||||
return io.WriteString(w.Forward, ansi.Strip(string(p)))
|
||||
default:
|
||||
return w.downsample(p)
|
||||
}
|
||||
}
|
||||
|
||||
// downsample downgrades the given text to the appropriate color profile.
|
||||
func (w *Writer) downsample(p []byte) (int, error) {
|
||||
var buf bytes.Buffer
|
||||
var state byte
|
||||
|
||||
parser := ansi.GetParser()
|
||||
defer ansi.PutParser(parser)
|
||||
|
||||
for len(p) > 0 {
|
||||
parser.Reset()
|
||||
seq, _, read, newState := ansi.DecodeSequence(p, state, parser)
|
||||
|
||||
switch {
|
||||
case ansi.HasCsiPrefix(seq) && parser.Command() == 'm':
|
||||
handleSgr(w, parser, &buf)
|
||||
default:
|
||||
// If we're not a style SGR sequence, just write the bytes.
|
||||
if n, err := buf.Write(seq); err != nil {
|
||||
return n, err
|
||||
}
|
||||
}
|
||||
|
||||
p = p[read:]
|
||||
state = newState
|
||||
}
|
||||
|
||||
return w.Forward.Write(buf.Bytes())
|
||||
}
|
||||
|
||||
// WriteString writes the given text to the underlying writer.
|
||||
func (w *Writer) WriteString(s string) (n int, err error) {
|
||||
return w.Write([]byte(s))
|
||||
}
|
||||
|
||||
func handleSgr(w *Writer, p *ansi.Parser, buf *bytes.Buffer) {
|
||||
var style ansi.Style
|
||||
params := p.Params()
|
||||
for i := 0; i < len(params); i++ {
|
||||
param := params[i]
|
||||
|
||||
switch param := param.Param(0); param {
|
||||
case 0:
|
||||
// SGR default parameter is 0. We use an empty string to reduce the
|
||||
// number of bytes written to the buffer.
|
||||
style = append(style, "")
|
||||
case 30, 31, 32, 33, 34, 35, 36, 37: // 8-bit foreground color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.ForegroundColor(
|
||||
w.Profile.Convert(ansi.BasicColor(param - 30))) //nolint:gosec
|
||||
case 38: // 16 or 24-bit foreground color
|
||||
var c color.Color
|
||||
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
|
||||
i += n - 1
|
||||
}
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.ForegroundColor(w.Profile.Convert(c))
|
||||
case 39: // default foreground color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.DefaultForegroundColor()
|
||||
case 40, 41, 42, 43, 44, 45, 46, 47: // 8-bit background color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.BackgroundColor(
|
||||
w.Profile.Convert(ansi.BasicColor(param - 40))) //nolint:gosec
|
||||
case 48: // 16 or 24-bit background color
|
||||
var c color.Color
|
||||
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
|
||||
i += n - 1
|
||||
}
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.BackgroundColor(w.Profile.Convert(c))
|
||||
case 49: // default background color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.DefaultBackgroundColor()
|
||||
case 58: // 16 or 24-bit underline color
|
||||
var c color.Color
|
||||
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
|
||||
i += n - 1
|
||||
}
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.UnderlineColor(w.Profile.Convert(c))
|
||||
case 59: // default underline color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.DefaultUnderlineColor()
|
||||
case 90, 91, 92, 93, 94, 95, 96, 97: // 8-bit bright foreground color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.ForegroundColor(
|
||||
w.Profile.Convert(ansi.BasicColor(param - 90 + 8))) //nolint:gosec
|
||||
case 100, 101, 102, 103, 104, 105, 106, 107: // 8-bit bright background color
|
||||
if w.Profile < ANSI {
|
||||
continue
|
||||
}
|
||||
style = style.BackgroundColor(
|
||||
w.Profile.Convert(ansi.BasicColor(param - 100 + 8))) //nolint:gosec
|
||||
default:
|
||||
// If this is not a color attribute, just append it to the style.
|
||||
style = append(style, strconv.Itoa(param))
|
||||
}
|
||||
}
|
||||
|
||||
_, _ = buf.WriteString(style.String())
|
||||
}
|
1 vendor/github.com/charmbracelet/lipgloss/.gitignore (generated, vendored)
@ -1 +1,2 @@
ssh_example_ed25519*
dist/
13 vendor/github.com/charmbracelet/lipgloss/.golangci.yml (generated, vendored)
@ -15,10 +15,22 @@ issues:
|
||||
linters:
|
||||
enable:
|
||||
- bodyclose
|
||||
- exhaustive
|
||||
- goconst
|
||||
- godot
|
||||
- godox
|
||||
- gofumpt
|
||||
- goimports
|
||||
- gomoddirectives
|
||||
- goprintffuncname
|
||||
- gosec
|
||||
- misspell
|
||||
- nakedret
|
||||
- nestif
|
||||
- nilerr
|
||||
- noctx
|
||||
- nolintlint
|
||||
- prealloc
|
||||
- revive
|
||||
- rowserrcheck
|
||||
- sqlclosecheck
|
||||
@ -26,3 +38,4 @@ linters:
|
||||
- unconvert
|
||||
- unparam
|
||||
- whitespace
|
||||
- wrapcheck
|
||||
|
4 vendor/github.com/charmbracelet/lipgloss/.goreleaser.yml (generated, vendored)
@ -1,5 +1,5 @@
# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json
version: 2
includes:
  - from_url:
      url: charmbracelet/meta/main/goreleaser-lib.yaml
# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json
62 vendor/github.com/charmbracelet/lipgloss/README.md (generated, vendored)
@ -10,7 +10,7 @@
|
||||
|
||||
Style definitions for nice terminal layouts. Built with TUIs in mind.
|
||||
|
||||

|
||||

|
||||
|
||||
Lip Gloss takes an expressive, declarative approach to terminal rendering.
|
||||
Users familiar with CSS will feel at home with Lip Gloss.
|
||||
@ -425,17 +425,28 @@ rows := [][]string{
|
||||
Use the table package to style and render the table.
|
||||
|
||||
```go
|
||||
var (
|
||||
purple = lipgloss.Color("99")
|
||||
gray = lipgloss.Color("245")
|
||||
lightGray = lipgloss.Color("241")
|
||||
|
||||
headerStyle = lipgloss.NewStyle().Foreground(purple).Bold(true).Align(lipgloss.Center)
|
||||
cellStyle = lipgloss.NewStyle().Padding(0, 1).Width(14)
|
||||
oddRowStyle = cellStyle.Foreground(gray)
|
||||
evenRowStyle = cellStyle.Foreground(lightGray)
|
||||
)
|
||||
|
||||
t := table.New().
|
||||
Border(lipgloss.NormalBorder()).
|
||||
BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("99"))).
|
||||
BorderStyle(lipgloss.NewStyle().Foreground(purple)).
|
||||
StyleFunc(func(row, col int) lipgloss.Style {
|
||||
switch {
|
||||
case row == 0:
|
||||
return HeaderStyle
|
||||
case row == table.HeaderRow:
|
||||
return headerStyle
|
||||
case row%2 == 0:
|
||||
return EvenRowStyle
|
||||
return evenRowStyle
|
||||
default:
|
||||
return OddRowStyle
|
||||
return oddRowStyle
|
||||
}
|
||||
}).
|
||||
Headers("LANGUAGE", "FORMAL", "INFORMAL").
|
||||
@ -453,6 +464,45 @@ fmt.Println(t)
|
||||
|
||||

|
||||
|
||||
> [!WARNING]
|
||||
> Table `Rows` need to be declared before `Offset` otherwise it does nothing.
|
||||
|
||||
### Table Borders
|
||||
|
||||
There are helpers to generate tables in markdown or ASCII style:
|
||||
|
||||
#### Markdown Table
|
||||
|
||||
```go
|
||||
table.New().Border(lipgloss.MarkdownBorder()).BorderTop(false).BorderBottom(false)
|
||||
```
|
||||
|
||||
```
|
||||
| LANGUAGE | FORMAL | INFORMAL |
|
||||
|----------|--------------|-----------|
|
||||
| Chinese | Nǐn hǎo | Nǐ hǎo |
|
||||
| French | Bonjour | Salut |
|
||||
| Russian | Zdravstvuyte | Privet |
|
||||
| Spanish | Hola | ¿Qué tal? |
|
||||
```
|
||||
|
||||
#### ASCII Table
|
||||
|
||||
```go
|
||||
table.New().Border(lipgloss.ASCIIBorder())
|
||||
```
|
||||
|
||||
```
|
||||
+----------+--------------+-----------+
|
||||
| LANGUAGE | FORMAL | INFORMAL |
|
||||
+----------+--------------+-----------+
|
||||
| Chinese | Nǐn hǎo | Nǐ hǎo |
|
||||
| French | Bonjour | Salut |
|
||||
| Russian | Zdravstvuyte | Privet |
|
||||
| Spanish | Hola | ¿Qué tal? |
|
||||
+----------+--------------+-----------+
|
||||
```
|
||||
|
||||
For more on tables see [the docs](https://pkg.go.dev/github.com/charmbracelet/lipgloss?tab=doc) and [examples](https://github.com/charmbracelet/lipgloss/tree/master/examples/table).
|
||||
|
||||
## Rendering Lists
|
||||
|
19 vendor/github.com/charmbracelet/lipgloss/Taskfile.yaml (generated, vendored, new file)
@ -0,0 +1,19 @@
# https://taskfile.dev

version: '3'

tasks:
  lint:
    desc: Run base linters
    cmds:
      - golangci-lint run

  test:
    desc: Run tests
    cmds:
      - go test ./... {{.CLI_ARGS}}

  test:table:
    desc: Run table tests
    cmds:
      - go test ./table {{.CLI_ARGS}}
6 vendor/github.com/charmbracelet/lipgloss/align.go (generated, vendored)
@ -30,8 +30,8 @@ func alignTextHorizontal(str string, pos Position, width int, style *termenv.Sty
		l = s + l
	case Center:
		// Note: remainder goes on the right.
		left := shortAmount / 2       //nolint:gomnd
		right := left + shortAmount%2 //nolint:gomnd
		left := shortAmount / 2       //nolint:mnd
		right := left + shortAmount%2 //nolint:mnd

		leftSpaces := strings.Repeat(" ", left)
		rightSpaces := strings.Repeat(" ", right)
@ -69,7 +69,7 @@ func alignTextVertical(str string, pos Position, height int, _ *termenv.Style) s
	case Top:
		return str + strings.Repeat("\n", height-strHeight)
	case Center:
		topPadding, bottomPadding := (height-strHeight)/2, (height-strHeight)/2 //nolint:gomnd
		topPadding, bottomPadding := (height-strHeight)/2, (height-strHeight)/2 //nolint:mnd
		if strHeight+topPadding+bottomPadding > height {
			topPadding--
		} else if strHeight+topPadding+bottomPadding < height {
75 vendor/github.com/charmbracelet/lipgloss/borders.go (generated, vendored)
@ -100,14 +100,19 @@ var (
|
||||
}
|
||||
|
||||
blockBorder = Border{
|
||||
Top: "█",
|
||||
Bottom: "█",
|
||||
Left: "█",
|
||||
Right: "█",
|
||||
TopLeft: "█",
|
||||
TopRight: "█",
|
||||
BottomLeft: "█",
|
||||
BottomRight: "█",
|
||||
Top: "█",
|
||||
Bottom: "█",
|
||||
Left: "█",
|
||||
Right: "█",
|
||||
TopLeft: "█",
|
||||
TopRight: "█",
|
||||
BottomLeft: "█",
|
||||
BottomRight: "█",
|
||||
MiddleLeft: "█",
|
||||
MiddleRight: "█",
|
||||
Middle: "█",
|
||||
MiddleTop: "█",
|
||||
MiddleBottom: "█",
|
||||
}
|
||||
|
||||
outerHalfBlockBorder = Border{
|
||||
@ -179,6 +184,38 @@ var (
|
||||
MiddleTop: " ",
|
||||
MiddleBottom: " ",
|
||||
}
|
||||
|
||||
markdownBorder = Border{
|
||||
Top: "-",
|
||||
Bottom: "-",
|
||||
Left: "|",
|
||||
Right: "|",
|
||||
TopLeft: "|",
|
||||
TopRight: "|",
|
||||
BottomLeft: "|",
|
||||
BottomRight: "|",
|
||||
MiddleLeft: "|",
|
||||
MiddleRight: "|",
|
||||
Middle: "|",
|
||||
MiddleTop: "|",
|
||||
MiddleBottom: "|",
|
||||
}
|
||||
|
||||
asciiBorder = Border{
|
||||
Top: "-",
|
||||
Bottom: "-",
|
||||
Left: "|",
|
||||
Right: "|",
|
||||
TopLeft: "+",
|
||||
TopRight: "+",
|
||||
BottomLeft: "+",
|
||||
BottomRight: "+",
|
||||
MiddleLeft: "+",
|
||||
MiddleRight: "+",
|
||||
Middle: "+",
|
||||
MiddleTop: "+",
|
||||
MiddleBottom: "+",
|
||||
}
|
||||
)
|
||||
|
||||
// NormalBorder returns a standard-type border with a normal weight and 90
|
||||
@ -226,13 +263,23 @@ func HiddenBorder() Border {
|
||||
return hiddenBorder
|
||||
}
|
||||
|
||||
// MarkdownBorder return a table border in markdown style.
|
||||
//
|
||||
// Make sure to disable top and bottom border for the best result. This will
|
||||
// ensure that the output is valid markdown.
|
||||
//
|
||||
// table.New().Border(lipgloss.MarkdownBorder()).BorderTop(false).BorderBottom(false)
|
||||
func MarkdownBorder() Border {
|
||||
return markdownBorder
|
||||
}
|
||||
|
||||
// ASCIIBorder returns a table border with ASCII characters.
|
||||
func ASCIIBorder() Border {
|
||||
return asciiBorder
|
||||
}
|
||||
|
||||
func (s Style) applyBorder(str string) string {
|
||||
var (
|
||||
topSet = s.isSet(borderTopKey)
|
||||
rightSet = s.isSet(borderRightKey)
|
||||
bottomSet = s.isSet(borderBottomKey)
|
||||
leftSet = s.isSet(borderLeftKey)
|
||||
|
||||
border = s.getBorderStyle()
|
||||
hasTop = s.getAsBool(borderTopKey, false)
|
||||
hasRight = s.getAsBool(borderRightKey, false)
|
||||
@ -252,7 +299,7 @@ func (s Style) applyBorder(str string) string {
|
||||
|
||||
// If a border is set and no sides have been specifically turned on or off
|
||||
// render borders on all sides.
|
||||
if border != noBorder && !(topSet || rightSet || bottomSet || leftSet) {
|
||||
if s.implicitBorders() {
|
||||
hasTop = true
|
||||
hasRight = true
|
||||
hasBottom = true
|
||||
|
2 vendor/github.com/charmbracelet/lipgloss/color.go (generated, vendored)
@ -35,7 +35,7 @@ func (NoColor) color(*Renderer) termenv.Color {
//
// Deprecated.
func (n NoColor) RGBA() (r, g, b, a uint32) {
	return 0x0, 0x0, 0x0, 0xFFFF //nolint:gomnd
	return 0x0, 0x0, 0x0, 0xFFFF //nolint:mnd
}

// Color specifies a color by hex or ANSI value. For example:
22 vendor/github.com/charmbracelet/lipgloss/get.go (generated, vendored)
@ -300,7 +300,7 @@ func (s Style) GetBorderTopWidth() int {
|
||||
// runes of varying widths, the widest rune is returned. If no border exists on
|
||||
// the top edge, 0 is returned.
|
||||
func (s Style) GetBorderTopSize() int {
|
||||
if !s.getAsBool(borderTopKey, false) {
|
||||
if !s.getAsBool(borderTopKey, false) && !s.implicitBorders() {
|
||||
return 0
|
||||
}
|
||||
return s.getBorderStyle().GetTopSize()
|
||||
@ -310,7 +310,7 @@ func (s Style) GetBorderTopSize() int {
|
||||
// runes of varying widths, the widest rune is returned. If no border exists on
|
||||
// the left edge, 0 is returned.
|
||||
func (s Style) GetBorderLeftSize() int {
|
||||
if !s.getAsBool(borderLeftKey, false) {
|
||||
if !s.getAsBool(borderLeftKey, false) && !s.implicitBorders() {
|
||||
return 0
|
||||
}
|
||||
return s.getBorderStyle().GetLeftSize()
|
||||
@ -320,7 +320,7 @@ func (s Style) GetBorderLeftSize() int {
|
||||
// contain runes of varying widths, the widest rune is returned. If no border
|
||||
// exists on the left edge, 0 is returned.
|
||||
func (s Style) GetBorderBottomSize() int {
|
||||
if !s.getAsBool(borderBottomKey, false) {
|
||||
if !s.getAsBool(borderBottomKey, false) && !s.implicitBorders() {
|
||||
return 0
|
||||
}
|
||||
return s.getBorderStyle().GetBottomSize()
|
||||
@ -330,7 +330,7 @@ func (s Style) GetBorderBottomSize() int {
|
||||
// contain runes of varying widths, the widest rune is returned. If no border
|
||||
// exists on the right edge, 0 is returned.
|
||||
func (s Style) GetBorderRightSize() int {
|
||||
if !s.getAsBool(borderRightKey, false) {
|
||||
if !s.getAsBool(borderRightKey, false) && !s.implicitBorders() {
|
||||
return 0
|
||||
}
|
||||
return s.getBorderStyle().GetRightSize()
|
||||
@ -519,6 +519,20 @@ func (s Style) getBorderStyle() Border {
|
||||
return s.borderStyle
|
||||
}
|
||||
|
||||
// Returns whether or not the style has implicit borders. This happens when
|
||||
// a border style has been set but no border sides have been explicitly turned
|
||||
// on or off.
|
||||
func (s Style) implicitBorders() bool {
|
||||
var (
|
||||
borderStyle = s.getBorderStyle()
|
||||
topSet = s.isSet(borderTopKey)
|
||||
rightSet = s.isSet(borderRightKey)
|
||||
bottomSet = s.isSet(borderBottomKey)
|
||||
leftSet = s.isSet(borderLeftKey)
|
||||
)
|
||||
return borderStyle != noBorder && !(topSet || rightSet || bottomSet || leftSet)
|
||||
}
|
||||
|
||||
func (s Style) getAsTransform(propKey) func(string) string {
|
||||
if !s.isSet(transformKey) {
|
||||
return nil
|
||||
|
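For context on how the implicitBorders() fallback above is meant to behave, here is a small hedged sketch using only lipgloss's public border setters and the size getters touched in this hunk; the printed values are expectations, not output recorded in this changeset.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	// A border style is set but no sides are toggled explicitly, so the
	// implicitBorders() check makes the size getters report the border
	// instead of 0.
	implicit := lipgloss.NewStyle().Border(lipgloss.NormalBorder())
	fmt.Println(implicit.GetBorderTopSize()) // expected: 1

	// Explicitly disabling a side still reports 0 for that edge.
	noTop := lipgloss.NewStyle().Border(lipgloss.NormalBorder()).BorderTop(false)
	fmt.Println(noTop.GetBorderTopSize()) // expected: 0
}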
48 vendor/github.com/charmbracelet/lipgloss/ranges.go generated vendored Normal file
@ -0,0 +1,48 @@
package lipgloss

import (
	"strings"

	"github.com/charmbracelet/x/ansi"
)

// StyleRanges allows to, given a string, style ranges of it differently.
// The function will take into account existing styles.
// Ranges should not overlap.
func StyleRanges(s string, ranges ...Range) string {
	if len(ranges) == 0 {
		return s
	}

	var buf strings.Builder
	lastIdx := 0
	stripped := ansi.Strip(s)

	// Use Truncate and TruncateLeft to style match.MatchedIndexes without
	// losing the original option style:
	for _, rng := range ranges {
		// Add the text before this match
		if rng.Start > lastIdx {
			buf.WriteString(ansi.Cut(s, lastIdx, rng.Start))
		}
		// Add the matched range with its highlight
		buf.WriteString(rng.Style.Render(ansi.Cut(stripped, rng.Start, rng.End)))
		lastIdx = rng.End
	}

	// Add any remaining text after the last match
	buf.WriteString(ansi.TruncateLeft(s, lastIdx, ""))

	return buf.String()
}

// NewRange returns a range that can be used with [StyleRanges].
func NewRange(start, end int, style Style) Range {
	return Range{start, end, style}
}

// Range to be used with [StyleRanges].
type Range struct {
	Start, End int
	Style Style
}
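A minimal usage sketch of the new ranges API; the input string, the indices and the Reverse style are made up for illustration.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	highlight := lipgloss.NewStyle().Reverse(true)

	// Style only the word "brown" (indices 10-15); the rest of the
	// string keeps whatever styling it already carries.
	s := "the quick brown fox"
	fmt.Println(lipgloss.StyleRanges(s, lipgloss.NewRange(10, 15, highlight)))
}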
18 vendor/github.com/charmbracelet/lipgloss/set.go generated vendored
@ -710,19 +710,19 @@ func whichSidesInt(i ...int) (top, right, bottom, left int, ok bool) {
		left = i[0]
		right = i[0]
		ok = true
	case 2: //nolint:gomnd
	case 2: //nolint:mnd
		top = i[0]
		bottom = i[0]
		left = i[1]
		right = i[1]
		ok = true
	case 3: //nolint:gomnd
	case 3: //nolint:mnd
		top = i[0]
		left = i[1]
		right = i[1]
		bottom = i[2]
		ok = true
	case 4: //nolint:gomnd
	case 4: //nolint:mnd
		top = i[0]
		right = i[1]
		bottom = i[2]
@ -743,19 +743,19 @@ func whichSidesBool(i ...bool) (top, right, bottom, left bool, ok bool) {
		left = i[0]
		right = i[0]
		ok = true
	case 2: //nolint:gomnd
	case 2: //nolint:mnd
		top = i[0]
		bottom = i[0]
		left = i[1]
		right = i[1]
		ok = true
	case 3: //nolint:gomnd
	case 3: //nolint:mnd
		top = i[0]
		left = i[1]
		right = i[1]
		bottom = i[2]
		ok = true
	case 4: //nolint:gomnd
	case 4: //nolint:mnd
		top = i[0]
		right = i[1]
		bottom = i[2]
@ -776,19 +776,19 @@ func whichSidesColor(i ...TerminalColor) (top, right, bottom, left TerminalColor
		left = i[0]
		right = i[0]
		ok = true
	case 2: //nolint:gomnd
	case 2: //nolint:mnd
		top = i[0]
		bottom = i[0]
		left = i[1]
		right = i[1]
		ok = true
	case 3: //nolint:gomnd
	case 3: //nolint:mnd
		top = i[0]
		left = i[1]
		right = i[1]
		bottom = i[2]
		ok = true
	case 4: //nolint:gomnd
	case 4: //nolint:mnd
		top = i[0]
		right = i[1]
		bottom = i[2]
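The whichSides* helpers back the CSS-like shorthand of lipgloss's side-based setters: one value applies to all sides, two mean vertical/horizontal, three mean top/horizontal/bottom, four go clockwise from the top. A hedged sketch using only the public Padding/GetPadding API; the values are arbitrary.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	// Two values: top/bottom = 1, left/right = 2 (the "case 2" branch above).
	s := lipgloss.NewStyle().Padding(1, 2)

	// GetPadding returns top, right, bottom, left.
	fmt.Println(s.GetPadding()) // expected: 1 2 1 2
}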
5 vendor/github.com/charmbracelet/lipgloss/style.go generated vendored
@ -5,6 +5,7 @@ import (
	"unicode"

	"github.com/charmbracelet/x/ansi"
	"github.com/charmbracelet/x/cellbuf"
	"github.com/muesli/termenv"
)

@ -364,7 +365,7 @@ func (s Style) Render(strs ...string) string {
	// Word wrap
	if !inline && width > 0 {
		wrapAt := width - leftPadding - rightPadding
		str = ansi.Wrap(str, wrapAt, "")
		str = cellbuf.Wrap(str, wrapAt, "")
	}

	// Render core text
@ -431,7 +432,7 @@ func (s Style) Render(strs ...string) string {
	{
		numLines := strings.Count(str, "\n")

		if !(numLines == 0 && width == 0) {
		if numLines != 0 || width != 0 {
			var st *termenv.Style
			if colorWhitespace || styleWhitespace {
				st = &teWhitespace
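The Render hunk above swaps ansi.Wrap for cellbuf.Wrap on the word-wrap path. A small sketch of the code path that triggers it, using only the public Width/Render API; the sample text is arbitrary.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	// Width() enables the wrap branch patched above; wide (double-cell)
	// runes are the kind of input the cell-aware wrapper handles.
	box := lipgloss.NewStyle().Width(12)
	fmt.Println(box.Render("こんにちは world, this wraps at twelve cells"))
}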
418 vendor/github.com/charmbracelet/lipgloss/table/resizing.go generated vendored Normal file
@ -0,0 +1,418 @@
package table

import (
	"math"
	"strings"

	"github.com/charmbracelet/lipgloss"
	"github.com/charmbracelet/x/ansi"
)

// resize resizes the table to fit the specified width.
//
// Given a user defined table width, we must ensure the table is exactly that
// width. This must account for all borders, column, separators, and column
// data.
//
// In the case where the table is narrower than the specified table width,
// we simply expand the columns evenly to fit the width.
// For example, a table with 3 columns takes up 50 characters total, and the
// width specified is 80, we expand each column by 10 characters, adding 30
// to the total width.
//
// In the case where the table is wider than the specified table width, we
// _could_ simply shrink the columns evenly but this would result in data
// being truncated (perhaps unnecessarily). The naive approach could result
// in very poor cropping of the table data. So, instead of shrinking columns
// evenly, we calculate the median non-whitespace length of each column, and
// shrink the columns based on the largest median.
//
// For example,
//
// ┌──────┬───────────────┬──────────┐
// │ Name │ Age of Person │ Location │
// ├──────┼───────────────┼──────────┤
// │ Kini │ 40            │ New York │
// │ Eli  │ 30            │ London   │
// │ Iris │ 20            │ Paris    │
// └──────┴───────────────┴──────────┘
//
// Median non-whitespace length vs column width of each column:
//
// Name: 4 / 5
// Age of Person: 2 / 15
// Location: 6 / 10
//
// The biggest difference is 15 - 2, so we can shrink the 2nd column by 13.
func (t *Table) resize() {
	hasHeaders := len(t.headers) > 0
	rows := dataToMatrix(t.data)
	r := newResizer(t.width, t.height, t.headers, rows)
	r.wrap = t.wrap
	r.borderColumn = t.borderColumn
	r.yPaddings = make([][]int, len(r.allRows))

	var allRows [][]string
	if hasHeaders {
		allRows = append([][]string{t.headers}, rows...)
	} else {
		allRows = rows
	}

	r.rowHeights = r.defaultRowHeights()

	for i, row := range allRows {
		r.yPaddings[i] = make([]int, len(row))

		for j := range row {
			column := &r.columns[j]

			// Making sure we're passing the right index to `styleFunc`. The header row should be `-1` and
			// the others should start from `0`.
			rowIndex := i
			if hasHeaders {
				rowIndex--
			}
			style := t.styleFunc(rowIndex, j)

			topMargin, rightMargin, bottomMargin, leftMargin := style.GetMargin()
			topPadding, rightPadding, bottomPadding, leftPadding := style.GetPadding()

			totalHorizontalPadding := leftMargin + rightMargin + leftPadding + rightPadding
			column.xPadding = max(column.xPadding, totalHorizontalPadding)
			column.fixedWidth = max(column.fixedWidth, style.GetWidth())

			r.rowHeights[i] = max(r.rowHeights[i], style.GetHeight())

			totalVerticalPadding := topMargin + bottomMargin + topPadding + bottomPadding
			r.yPaddings[i][j] = totalVerticalPadding
		}
	}

	// A table width wasn't specified. In this case, detect according to
	// content width.
	if r.tableWidth <= 0 {
		r.tableWidth = r.detectTableWidth()
	}

	t.widths, t.heights = r.optimizedWidths()
}

// resizerColumn is a column in the resizer.
type resizerColumn struct {
	index int
	min int
	max int
	median int
	rows [][]string
	xPadding int // horizontal padding
	fixedWidth int
}

// resizer is a table resizer.
type resizer struct {
	tableWidth int
	tableHeight int
	headers []string
	allRows [][]string
	rowHeights []int
	columns []resizerColumn

	wrap bool
	borderColumn bool
	yPaddings [][]int // vertical paddings
}

// newResizer creates a new resizer.
func newResizer(tableWidth, tableHeight int, headers []string, rows [][]string) *resizer {
	r := &resizer{
		tableWidth: tableWidth,
		tableHeight: tableHeight,
		headers: headers,
	}

	if len(headers) > 0 {
		r.allRows = append([][]string{headers}, rows...)
	} else {
		r.allRows = rows
	}

	for _, row := range r.allRows {
		for i, cell := range row {
			cellLen := lipgloss.Width(cell)

			// Header or first row. Just add as is.
			if len(r.columns) <= i {
				r.columns = append(r.columns, resizerColumn{
					index: i,
					min: cellLen,
					max: cellLen,
					median: cellLen,
				})
				continue
			}

			r.columns[i].rows = append(r.columns[i].rows, row)
			r.columns[i].min = min(r.columns[i].min, cellLen)
			r.columns[i].max = max(r.columns[i].max, cellLen)
		}
	}
	for j := range r.columns {
		widths := make([]int, len(r.columns[j].rows))
		for i, row := range r.columns[j].rows {
			widths[i] = lipgloss.Width(row[j])
		}
		r.columns[j].median = median(widths)
	}

	return r
}

// optimizedWidths returns the optimized column widths and row heights.
func (r *resizer) optimizedWidths() (colWidths, rowHeights []int) {
	if r.maxTotal() <= r.tableWidth {
		return r.expandTableWidth()
	}
	return r.shrinkTableWidth()
}

// detectTableWidth detects the table width.
func (r *resizer) detectTableWidth() int {
	return r.maxCharCount() + r.totalHorizontalPadding() + r.totalHorizontalBorder()
}

// expandTableWidth expands the table width.
func (r *resizer) expandTableWidth() (colWidths, rowHeights []int) {
	colWidths = r.maxColumnWidths()

	for {
		totalWidth := sum(colWidths) + r.totalHorizontalBorder()
		if totalWidth >= r.tableWidth {
			break
		}

		shorterColumnIndex := 0
		shorterColumnWidth := math.MaxInt32

		for j, width := range colWidths {
			if width == r.columns[j].fixedWidth {
				continue
			}
			if width < shorterColumnWidth {
				shorterColumnWidth = width
				shorterColumnIndex = j
			}
		}

		colWidths[shorterColumnIndex]++
	}

	rowHeights = r.expandRowHeigths(colWidths)
	return
}

// shrinkTableWidth shrinks the table width.
func (r *resizer) shrinkTableWidth() (colWidths, rowHeights []int) {
	colWidths = r.maxColumnWidths()

	// Cut width of columns that are way too big.
	shrinkBiggestColumns := func(veryBigOnly bool) {
		for {
			totalWidth := sum(colWidths) + r.totalHorizontalBorder()
			if totalWidth <= r.tableWidth {
				break
			}

			bigColumnIndex := -math.MaxInt32
			bigColumnWidth := -math.MaxInt32

			for j, width := range colWidths {
				if width == r.columns[j].fixedWidth {
					continue
				}
				if veryBigOnly {
					if width >= (r.tableWidth/2) && width > bigColumnWidth { //nolint:mnd
						bigColumnWidth = width
						bigColumnIndex = j
					}
				} else {
					if width > bigColumnWidth {
						bigColumnWidth = width
						bigColumnIndex = j
					}
				}
			}

			if bigColumnIndex < 0 || colWidths[bigColumnIndex] == 0 {
				break
			}
			colWidths[bigColumnIndex]--
		}
	}

	// Cut width of columns that differ the most from the median.
	shrinkToMedian := func() {
		for {
			totalWidth := sum(colWidths) + r.totalHorizontalBorder()
			if totalWidth <= r.tableWidth {
				break
			}

			biggestDiffToMedian := -math.MaxInt32
			biggestDiffToMedianIndex := -math.MaxInt32

			for j, width := range colWidths {
				if width == r.columns[j].fixedWidth {
					continue
				}
				diffToMedian := width - r.columns[j].median
				if diffToMedian > 0 && diffToMedian > biggestDiffToMedian {
					biggestDiffToMedian = diffToMedian
					biggestDiffToMedianIndex = j
				}
			}

			if biggestDiffToMedianIndex <= 0 || colWidths[biggestDiffToMedianIndex] == 0 {
				break
			}
			colWidths[biggestDiffToMedianIndex]--
		}
	}

	shrinkBiggestColumns(true)
	shrinkToMedian()
	shrinkBiggestColumns(false)

	return colWidths, r.expandRowHeigths(colWidths)
}

// expandRowHeigths expands the row heights.
func (r *resizer) expandRowHeigths(colWidths []int) (rowHeights []int) {
	rowHeights = r.defaultRowHeights()
	if !r.wrap {
		return rowHeights
	}
	for i, row := range r.allRows {
		for j, cell := range row {
			height := r.detectContentHeight(cell, colWidths[j]-r.xPaddingForCol(j)) + r.xPaddingForCell(i, j)
			if height > rowHeights[i] {
				rowHeights[i] = height
			}
		}
	}
	return
}

// defaultRowHeights returns the default row heights.
func (r *resizer) defaultRowHeights() (rowHeights []int) {
	rowHeights = make([]int, len(r.allRows))
	for i := range rowHeights {
		if i < len(r.rowHeights) {
			rowHeights[i] = r.rowHeights[i]
		}
		if rowHeights[i] < 1 {
			rowHeights[i] = 1
		}
	}
	return
}

// maxColumnWidths returns the maximum column widths.
func (r *resizer) maxColumnWidths() []int {
	maxColumnWidths := make([]int, len(r.columns))
	for i, col := range r.columns {
		if col.fixedWidth > 0 {
			maxColumnWidths[i] = col.fixedWidth
		} else {
			maxColumnWidths[i] = col.max + r.xPaddingForCol(col.index)
		}
	}
	return maxColumnWidths
}

// columnCount returns the column count.
func (r *resizer) columnCount() int {
	return len(r.columns)
}

// maxCharCount returns the maximum character count.
func (r *resizer) maxCharCount() int {
	var count int
	for _, col := range r.columns {
		if col.fixedWidth > 0 {
			count += col.fixedWidth - r.xPaddingForCol(col.index)
		} else {
			count += col.max
		}
	}
	return count
}

// maxTotal returns the maximum total width.
func (r *resizer) maxTotal() (maxTotal int) {
	for j, column := range r.columns {
		if column.fixedWidth > 0 {
			maxTotal += column.fixedWidth
		} else {
			maxTotal += column.max + r.xPaddingForCol(j)
		}
	}
	return
}

// totalHorizontalPadding returns the total padding.
func (r *resizer) totalHorizontalPadding() (totalHorizontalPadding int) {
	for _, col := range r.columns {
		totalHorizontalPadding += col.xPadding
	}
	return
}

// xPaddingForCol returns the horizontal padding for a column.
func (r *resizer) xPaddingForCol(j int) int {
	if j >= len(r.columns) {
		return 0
	}
	return r.columns[j].xPadding
}

// xPaddingForCell returns the horizontal padding for a cell.
func (r *resizer) xPaddingForCell(i, j int) int {
	if i >= len(r.yPaddings) || j >= len(r.yPaddings[i]) {
		return 0
	}
	return r.yPaddings[i][j]
}

// totalHorizontalBorder returns the total border.
func (r *resizer) totalHorizontalBorder() int {
	return (r.columnCount() * r.borderPerCell()) + r.extraBorder()
}

// borderPerCell returns number of border chars per cell.
func (r *resizer) borderPerCell() int {
	if r.borderColumn {
		return 1
	}
	return 0
}

// extraBorder returns the number of the extra border char at the end of the table.
func (r *resizer) extraBorder() int {
	if r.borderColumn {
		return 1
	}
	return 0
}

// detectContentHeight detects the content height.
func (r *resizer) detectContentHeight(content string, width int) (height int) {
	if width == 0 {
		return 1
	}
	content = strings.ReplaceAll(content, "\r\n", "\n")
	for _, line := range strings.Split(content, "\n") {
		height += strings.Count(ansi.Wrap(line, width, ""), "\n") + 1
	}
	return
}
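A toy, self-contained check of the shrink heuristic described in the resize() comment above: with the example column widths 5/15/10 and medians 4/2/6, the "Age of Person" column has the largest width-to-median gap and is shrunk first. Plain Go, no lipgloss dependency.

package main

import "fmt"

func main() {
	widths := []int{5, 15, 10} // Name, Age of Person, Location
	medians := []int{4, 2, 6}

	best, bestDiff := -1, -1
	for j := range widths {
		if d := widths[j] - medians[j]; d > bestDiff {
			best, bestDiff = j, d
		}
	}
	fmt.Println(best, bestDiff) // 1 13: shrink column 1 by up to 13 cells
}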
16 vendor/github.com/charmbracelet/lipgloss/table/rows.go generated vendored
@ -111,3 +111,19 @@ func (m *Filter) Rows() int {

	return j
}

// dataToMatrix converts an object that implements the Data interface to a table.
func dataToMatrix(data Data) (rows [][]string) {
	numRows := data.Rows()
	numCols := data.Columns()
	rows = make([][]string, numRows)

	for i := 0; i < numRows; i++ {
		rows[i] = make([]string, numCols)

		for j := 0; j < numCols; j++ {
			rows[i][j] = data.At(i, j)
		}
	}
	return
}
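dataToMatrix flattens anything that satisfies the table Data interface (At/Rows/Columns, as used above). A hedged sketch of a custom implementation; the Data(...) setter on Table is assumed from the existing lipgloss/table API and is not part of this diff.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss/table"
)

// sliceData adapts a [][]string to the Data interface that
// dataToMatrix consumes (assumed shape: At, Rows, Columns).
type sliceData [][]string

func (d sliceData) At(row, col int) string { return d[row][col] }
func (d sliceData) Rows() int              { return len(d) }
func (d sliceData) Columns() int {
	if len(d) == 0 {
		return 0
	}
	return len(d[0])
}

func main() {
	t := table.New().Data(sliceData{{"Kini", "40"}, {"Eli", "30"}})
	fmt.Println(t)
}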
142 vendor/github.com/charmbracelet/lipgloss/table/table.go generated vendored
@ -61,6 +61,7 @@ type Table struct {
	height int
	useManualHeight bool
	offset int
	wrap bool

	// widths tracks the width of each column.
	widths []int
@ -83,6 +84,7 @@ func New() *Table {
		borderLeft: true,
		borderRight: true,
		borderTop: true,
		wrap: true,
		data: NewStringData(),
	}
}
@ -209,11 +211,20 @@ func (t *Table) Height(h int) *Table {
}

// Offset sets the table rendering offset.
//
// Warning: you may declare Offset only after setting Rows. Otherwise it will be
// ignored.
func (t *Table) Offset(o int) *Table {
	t.offset = o
	return t
}

// Wrap dictates whether or not the table content should wrap.
func (t *Table) Wrap(w bool) *Table {
	t.wrap = w
	return t
}

// String returns the table as a string.
func (t *Table) String() string {
	hasHeaders := len(t.headers) > 0
@ -231,120 +242,8 @@ func (t *Table) String() string {
		}
	}

	// Initialize the widths.
	t.widths = make([]int, max(len(t.headers), t.data.Columns()))
	t.heights = make([]int, btoi(hasHeaders)+t.data.Rows())

	// The style function may affect width of the table. It's possible to set
	// the StyleFunc after the headers and rows. Update the widths for a final
	// time.
	for i, cell := range t.headers {
		t.widths[i] = max(t.widths[i], lipgloss.Width(t.style(HeaderRow, i).Render(cell)))
		t.heights[0] = max(t.heights[0], lipgloss.Height(t.style(HeaderRow, i).Render(cell)))
	}

	for r := 0; r < t.data.Rows(); r++ {
		for i := 0; i < t.data.Columns(); i++ {
			cell := t.data.At(r, i)

			rendered := t.style(r, i).Render(cell)
			t.heights[r+btoi(hasHeaders)] = max(t.heights[r+btoi(hasHeaders)], lipgloss.Height(rendered))
			t.widths[i] = max(t.widths[i], lipgloss.Width(rendered))
		}
	}

	// Table Resizing Logic.
	//
	// Given a user defined table width, we must ensure the table is exactly that
	// width. This must account for all borders, column, separators, and column
	// data.
	//
	// In the case where the table is narrower than the specified table width,
	// we simply expand the columns evenly to fit the width.
	// For example, a table with 3 columns takes up 50 characters total, and the
	// width specified is 80, we expand each column by 10 characters, adding 30
	// to the total width.
	//
	// In the case where the table is wider than the specified table width, we
	// _could_ simply shrink the columns evenly but this would result in data
	// being truncated (perhaps unnecessarily). The naive approach could result
	// in very poor cropping of the table data. So, instead of shrinking columns
	// evenly, we calculate the median non-whitespace length of each column, and
	// shrink the columns based on the largest median.
	//
	// For example,
	// ┌──────┬───────────────┬──────────┐
	// │ Name │ Age of Person │ Location │
	// ├──────┼───────────────┼──────────┤
	// │ Kini │ 40            │ New York │
	// │ Eli  │ 30            │ London   │
	// │ Iris │ 20            │ Paris    │
	// └──────┴───────────────┴──────────┘
	//
	// Median non-whitespace length vs column width of each column:
	//
	// Name: 4 / 5
	// Age of Person: 2 / 15
	// Location: 6 / 10
	//
	// The biggest difference is 15 - 2, so we can shrink the 2nd column by 13.

	width := t.computeWidth()

	if width < t.width && t.width > 0 {
		// Table is too narrow, expand the columns evenly until it reaches the
		// desired width.
		var i int
		for width < t.width {
			t.widths[i]++
			width++
			i = (i + 1) % len(t.widths)
		}
	} else if width > t.width && t.width > 0 {
		// Table is too wide, calculate the median non-whitespace length of each
		// column, and shrink the columns based on the largest difference.
		columnMedians := make([]int, len(t.widths))
		for c := range t.widths {
			trimmedWidth := make([]int, t.data.Rows())
			for r := 0; r < t.data.Rows(); r++ {
				renderedCell := t.style(r+btoi(hasHeaders), c).Render(t.data.At(r, c))
				nonWhitespaceChars := lipgloss.Width(strings.TrimRight(renderedCell, " "))
				trimmedWidth[r] = nonWhitespaceChars + 1
			}

			columnMedians[c] = median(trimmedWidth)
		}

		// Find the biggest differences between the median and the column width.
		// Shrink the columns based on the largest difference.
		differences := make([]int, len(t.widths))
		for i := range t.widths {
			differences[i] = t.widths[i] - columnMedians[i]
		}

		for width > t.width {
			index, _ := largest(differences)
			if differences[index] < 1 {
				break
			}

			shrink := min(differences[index], width-t.width)
			t.widths[index] -= shrink
			width -= shrink
			differences[index] = 0
		}

		// Table is still too wide, begin shrinking the columns based on the
		// largest column.
		for width > t.width {
			index, _ := largest(t.widths)
			if t.widths[index] < 1 {
				break
			}
			t.widths[index]--
			width--
		}
	}
	// Do all the sizing calculations for width and height.
	t.resize()

	var sb strings.Builder

@ -393,15 +292,6 @@ func (t *Table) String() string {
		Render(sb.String())
}

// computeWidth computes the width of the table in it's current configuration.
func (t *Table) computeWidth() int {
	width := sum(t.widths) + btoi(t.borderLeft) + btoi(t.borderRight)
	if t.borderColumn {
		width += len(t.widths) - 1
	}
	return width
}

// computeHeight computes the height of the table in it's current configuration.
func (t *Table) computeHeight() int {
	hasHeaders := len(t.headers) > 0
@ -553,13 +443,17 @@ func (t *Table) constructRow(index int, isOverflow bool) string {
		}

		cellStyle := t.style(index, c)
		if !t.wrap {
			length := (cellWidth * height) - cellStyle.GetHorizontalPadding()
			cell = ansi.Truncate(cell, length, "…")
		}
		cells = append(cells, cellStyle.
			// Account for the margins in the cell sizing.
			Height(height-cellStyle.GetVerticalMargins()).
			MaxHeight(height).
			Width(t.widths[c]-cellStyle.GetHorizontalMargins()).
			MaxWidth(t.widths[c]).
			Render(ansi.Truncate(cell, cellWidth*height, "…")))
			Render(cell))

		if c < t.data.Columns()-1 && t.borderColumn {
			cells = append(cells, left)
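A short usage sketch of the two setters added to Table above; Headers and Row are the pre-existing builder methods (not shown in this diff), so treat the exact chain as an assumption.

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss/table"
)

func main() {
	t := table.New().
		Headers("Name", "Location").
		Row("Kini", "New York, a deliberately long value").
		Row("Eli", "London").
		Width(28).  // narrower than the content, so resize() shrinks columns
		Wrap(false) // with the hunk above, cells are truncated with "…" instead of wrapped
	fmt.Println(t)
}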
18 vendor/github.com/charmbracelet/lipgloss/table/util.go generated vendored
@ -20,7 +20,7 @@ func max(a, b int) int { //nolint:predeclared
	return b
}

// min returns the greater of two integers.
// min returns the smaller of two integers.
func min(a, b int) int { //nolint:predeclared
	if a < b {
		return a
@ -45,20 +45,8 @@ func median(n []int) int {
		return 0
	}
	if len(n)%2 == 0 {
		h := len(n) / 2 //nolint:gomnd
		return (n[h-1] + n[h]) / 2 //nolint:gomnd
		h := len(n) / 2 //nolint:mnd
		return (n[h-1] + n[h]) / 2 //nolint:mnd
	}
	return n[len(n)/2]
}

// largest returns the largest element and it's index from a slice of integers.
func largest(n []int) (int, int) { //nolint:unparam
	var largest, index int
	for i, e := range n {
		if n[i] > n[index] {
			largest = e
			index = i
		}
	}
	return index, largest
}
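A tiny check of the even-length branch of median() shown above; it assumes the slice is already sorted, which the full helper takes care of before indexing.

package main

import "fmt"

func main() {
	n := []int{2, 4, 6, 15} // sorted input, even length
	h := len(n) / 2
	fmt.Println((n[h-1] + n[h]) / 2) // 5: the two middle values averaged
}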
11 vendor/github.com/charmbracelet/log/.golangci.yml generated vendored
@ -15,20 +15,27 @@ issues:
linters:
  enable:
    - bodyclose
    - dupl
    - exportloopref
    - exhaustive
    - goconst
    - godot
    - godox
    - gofumpt
    - goimports
    - gomoddirectives
    - goprintffuncname
    - gosec
    - misspell
    - nakedret
    - nestif
    - nilerr
    - noctx
    - nolintlint
    - prealloc
    - revive
    - rowserrcheck
    - sqlclosecheck
    - tparallel
    - unconvert
    - unparam
    - whitespace
    - wrapcheck
Some files were not shown because too many files have changed in this diff.