Compare commits

..

20 Commits

f4cadb5ad2 fix(cmd): Uses uppercase t for tty shorthand flag 2025-02-10 15:12:17 +01:00
cee0fa7c8e fix: app new with chaos should just take the local repo as it is 2025-02-10 14:58:13 +01:00
8a7fe4ca07 fix: prompt, skip adding if next present (#486) 2025-01-17 17:46:41 +01:00
64ad60663f test: adjust for new abra-test-recipe version (see #470) 2025-01-09 13:14:47 +00:00
cb3f46b46e fix: redirect to stderr for machine output (see #477) 2025-01-09 11:23:36 +00:00
41e514ae9a test: reset after deploy 2025-01-09 11:54:39 +01:00
086b4828ff docs: better comments, remove redundant output check 2025-01-09 11:54:38 +01:00
ed263854d4 fix: show N/A if env version unknown (see #478) 2025-01-09 11:54:37 +01:00
eb6fe4ba6e fix: dont set chaos label if no chaos (see #478) 2025-01-09 11:54:36 +01:00
993172d31b test: ensure .env version written 2025-01-08 13:42:35 +00:00
c70b6e72a7 test: ensure unstaged changes preserved 2025-01-08 13:42:35 +00:00
22e4dd7fca fix: app new from chaos changes (see #471) 2025-01-08 13:42:35 +00:00
b6009057a8 docs: note temp autocomplete, less whitespace 2025-01-08 12:10:17 +01:00
b978f04910 fix: use "sudo tee" to avoid permissions error (see #474) 2025-01-08 12:09:51 +01:00
3ac29d54d9 chore: go update deps/vendor 2025-01-07 16:59:56 +01:00
877c17fab5 test: re-enable this one 2025-01-05 16:46:48 +01:00
f01fd26ce3 test: git status output 2025-01-05 16:46:38 +01:00
273c165a41 docs: --chaos/-C handling for catalogue generate 2025-01-05 16:46:20 +01:00
c88fc66c99 test: moar chaos stability 😌 [ci skip] 2025-01-05 16:12:06 +01:00
9b271a6963 docs: moar authors [ci skip] 2025-01-05 15:53:17 +01:00
62 changed files with 1515 additions and 685 deletions

View File

@@ -4,6 +4,7 @@
 > please do add yourself! This is a community project, let's show some 💞
 - 3wordchant
+- ammaratef45
 - cassowary
 - codegod100
 - decentral1se
@@ -17,3 +18,5 @@
 - roxxers
 - vera
 - yksflip
+- basebuilder
+- mayel

View File

@@ -261,7 +261,7 @@ func init() {
 	AppCmdCommand.Flags().BoolVarP(
 		&requestTTY,
 		"tty",
-		"t",
+		"T",
 		false,
 		"request remote TTY",
 	)
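Note: cobra/pflag panics at startup when two flags on the same command register the same shorthand, which is presumably what forced the move from "-t" to "-T" here. A minimal sketch of that failure mode; the flag names below are invented for illustration:

    package main

    import "github.com/spf13/pflag"

    func main() {
        fs := pflag.NewFlagSet("demo", pflag.ExitOnError)
        fs.BoolP("timestamps", "t", false, "show timestamps")
        // A second BoolP(..., "t", ...) would panic with roughly:
        // "unable to redefine 't' shorthand in 'demo' flagset".
        // Picking an uppercase "T" sidesteps the clash:
        fs.BoolP("tty", "T", false, "request remote TTY")
        _ = fs.Parse([]string{"--tty"})
    }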

View File

@@ -179,7 +179,9 @@ checkout as-is. Recipe commit hashes are also supported as values for
 	appPkg.ExposeAllEnv(stackName, compose, app.Env)
 	appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
 	appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
-	appPkg.SetChaosVersionLabel(compose, stackName, toDeployChaosVersionLabel)
+	if internal.Chaos {
+		appPkg.SetChaosVersionLabel(compose, stackName, toDeployChaosVersionLabel)
+	}
 	appPkg.SetUpdateLabel(compose, stackName, app.Env)
 
 	envVars, err := appPkg.CheckEnv(app)
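Note: the same internal.Chaos guard lands in rollback and upgrade below, so a regular deployment no longer carries a chaos-version label. A quick manual check along the lines of the new bats assertions; example.com is a placeholder domain:

    # after a plain, non-chaos deploy the label should be absent
    abra app labels example.com --no-input | grep chaos-version \
      || echo "no chaos-version label"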

View File

@@ -75,43 +75,41 @@ var AppNewCommand = &cobra.Command{
 		chaosVersion := config.CHAOS_DEFAULT
 		if internal.Chaos {
-			recipeVersion = chaosVersion
-			if !internal.Offline {
-				if err := recipe.EnsureUpToDate(); err != nil {
-					log.Fatal(err)
-				}
-			}
-		}
-		if !internal.Chaos {
-			if err := recipe.EnsureIsClean(); err != nil {
-				log.Fatal(err)
-			}
-		}
-		var recipeVersions recipePkg.RecipeVersions
-		if recipeVersion == "" {
 			var err error
-			recipeVersions, _, err = recipe.GetRecipeVersions()
+			chaosVersion, err = recipe.ChaosVersion()
 			if err != nil {
 				log.Fatal(err)
 			}
-		}
-		if len(recipeVersions) > 0 {
-			latest := recipeVersions[len(recipeVersions)-1]
-			for tag := range latest {
-				recipeVersion = tag
-			}
-			if _, err := recipe.EnsureVersion(recipeVersion); err != nil {
-				log.Fatal(err)
-			}
+			recipeVersion = chaosVersion
 		} else {
-			if err := recipe.EnsureLatest(); err != nil {
+			if err := recipe.EnsureIsClean(); err != nil {
 				log.Fatal(err)
 			}
+			var recipeVersions recipePkg.RecipeVersions
+			if recipeVersion == "" {
+				var err error
+				recipeVersions, _, err = recipe.GetRecipeVersions()
+				if err != nil {
+					log.Fatal(err)
+				}
+			}
+			if len(recipeVersions) > 0 {
+				latest := recipeVersions[len(recipeVersions)-1]
+				for tag := range latest {
+					recipeVersion = tag
+				}
+				if _, err := recipe.EnsureVersion(recipeVersion); err != nil {
+					log.Fatal(err)
+				}
+			} else {
+				if err := recipe.EnsureLatest(); err != nil {
+					log.Fatal(err)
+				}
+			}
 		}
 
 		if err := ensureServerFlag(); err != nil {
@@ -211,7 +209,16 @@ var AppNewCommand = &cobra.Command{
 			log.Fatal(err)
 		}
 
-		if err := app.WriteRecipeVersion(recipeVersion, false); err != nil {
+		if err := app.Recipe.IsDirty(); err != nil {
+			log.Fatal(err)
+		}
+
+		toWriteVersion := recipeVersion
+		if internal.Chaos || app.Recipe.Dirty {
+			toWriteVersion = chaosVersion
+		}
+
+		if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil {
 			log.Fatalf("writing recipe version failed: %s", err)
 		}
 	},
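Note: condensed, the rework makes --chaos take the local recipe checkout as-is (per the commit message) instead of syncing it first, and only enforces a clean checkout on the non-chaos path. A paraphrased sketch of the decision with stand-in values rather than abra's real helpers:

    package main

    import "fmt"

    // resolveVersion mirrors the if/else above: chaos trusts the current
    // checkout (ChaosVersion, e.g. "1e83340e" or "1e83340e+U" when dirty),
    // otherwise a pinned version or the newest tag is checked out.
    func resolveVersion(chaos bool, pinned string, tags []string) string {
        if chaos {
            return "1e83340e+U" // stand-in for recipe.ChaosVersion()
        }
        if pinned != "" {
            return pinned // recipe.EnsureVersion(pinned)
        }
        if len(tags) > 0 {
            return tags[len(tags)-1] // newest known tag
        }
        return "latest" // recipe.EnsureLatest()
    }

    func main() {
        fmt.Println(resolveVersion(true, "", nil))
        fmt.Println(resolveVersion(false, "", []string{"0.2.0+1.21.0", "0.3.0+1.21.0"}))
    }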

View File

@@ -178,7 +178,9 @@ beforehand. See "abra app backup" for more.`,
 	appPkg.ExposeAllEnv(stackName, compose, app.Env)
 	appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
 	appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
-	appPkg.SetChaosVersionLabel(compose, stackName, chosenDowngrade)
+	if internal.Chaos {
+		appPkg.SetChaosVersionLabel(compose, stackName, chosenDowngrade)
+	}
 	appPkg.SetUpdateLabel(compose, stackName, app.Env)
 
 	chaosVersion := config.CHAOS_DEFAULT

View File

@@ -183,7 +183,9 @@ beforehand. See "abra app backup" for more.`,
 	appPkg.ExposeAllEnv(stackName, compose, app.Env)
 	appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
 	appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
-	appPkg.SetChaosVersionLabel(compose, stackName, chosenUpgrade)
+	if internal.Chaos {
+		appPkg.SetChaosVersionLabel(compose, stackName, chosenUpgrade)
+	}
 	appPkg.SetUpdateLabel(compose, stackName, app.Env)
 
 	envVars, err := appPkg.CheckEnv(app)

View File

@@ -25,6 +25,11 @@ var CatalogueGenerateCommand = &cobra.Command{
 	Short: "Generate the recipe catalogue",
 	Long: `Generate a new copy of the recipe catalogue.
 
+N.B. this command **will** wipe local unstaged changes from your local recipes
+if present. "--chaos/-C" on this command refers to the catalogue repository
+("$ABRA_DIR/catalogue") and not the recipes. Please take care not to lose your
+changes.
+
 It is possible to generate new metadata for a single recipe by passing
 [recipe]. The existing local catalogue will be updated, not overwritten.
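Note: a usage sketch of the behaviour the new help text documents; the recipe name is hypothetical:

    # regenerate metadata for one recipe; -C here refers to the catalogue
    # repository under $ABRA_DIR/catalogue, not to the recipe checkout
    abra catalogue generate myrecipe -C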

View File

@@ -12,17 +12,16 @@ var AutocompleteCommand = &cobra.Command{
 	Long: `To load completions:
 
 Bash:
-  # Load autocompletion for the current Bash session
   $ source <(abra autocomplete bash)
 
   # To load autocompletion for each session, execute once:
   # Linux:
-  $ abra autocomplete bash > /etc/bash_completion.d/abra
+  $ abra autocomplete bash | sudo tee /etc/bash_completion.d/abra
 
   # macOS:
-  $ abra autocomplete bash > $(brew --prefix)/etc/bash_completion.d/abra
+  $ abra autocomplete bash | sudo tee $(brew --prefix)/etc/bash_completion.d/abra
 
 Zsh:
 
 # If shell autocompletion is not already enabled in your environment,
 # you will need to enable it. You can execute the following once:
@@ -34,14 +33,12 @@ Zsh:
 # You will need to start a new shell for this setup to take effect.
 
 fish:
-
   $ abra autocomplete fish | source
 
   # To load autocompletions for each session, execute once:
   $ abra autocomplete fish > ~/.config/fish/completions/abra.fish
 
 PowerShell:
-
 PS> abra autocomplete powershell | Out-String | Invoke-Expression
 
 # To load autocompletions for every new session, run:
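Note: the "sudo tee" swap works because redirections are performed by the invoking shell before sudo ever runs, so ">" into /etc fails for an unprivileged user even with sudo in front of the command. A sketch:

    # fails: the user's shell, not root, tries to open the target file
    sudo abra autocomplete bash > /etc/bash_completion.d/abra
    # works: tee runs as root and opens the file; stdout is discarded
    abra autocomplete bash | sudo tee /etc/bash_completion.d/abra > /dev/null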

View File

@@ -64,6 +64,15 @@ func NewVersionOverview(
 	upperKind := strings.ToUpper(kind)
 
+	envVersion, err := recipe.GetEnvVersionRaw(app.Recipe.Name)
+	if err != nil {
+		return err
+	}
+
+	if envVersion == "" {
+		envVersion = config.NO_VERSION_DEFAULT
+	}
+
 	rows := [][]string{
 		{"DOMAIN", domain},
 		{"RECIPE", app.Recipe.Name},
@@ -78,7 +87,7 @@ func NewVersionOverview(
 		{"VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
 		{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Domain)), "---"},
-		{"CURRENT VERSION", formatter.BoldDirtyDefault(app.Recipe.EnvVersion)},
+		{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
 		{"NEW VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
 	}

View File

@@ -267,6 +267,8 @@ func addReleaseNotes(recipe recipe.Recipe, tag string) error {
 		return err
 	}
 
+	var addNextAsReleaseNotes bool
+
 	nextReleaseNotePath := path.Join(releaseDir, "next")
 	if _, err := os.Stat(nextReleaseNotePath); err == nil {
 		// release/next note exists. Move it to release/<tag>
@@ -276,38 +278,37 @@ func addReleaseNotes(recipe recipe.Recipe, tag string) error {
 		}
 
 		if !internal.NoInput {
-			prompt := &survey.Input{
+			prompt := &survey.Confirm{
 				Message: "Use release note in release/next?",
 			}
 
-			var addReleaseNote bool
-			if err := survey.AskOne(prompt, &addReleaseNote); err != nil {
+			if err := survey.AskOne(prompt, &addNextAsReleaseNotes); err != nil {
 				return err
 			}
 
-			if !addReleaseNote {
+			if !addNextAsReleaseNotes {
 				return nil
 			}
 		}
 
-		err := os.Rename(nextReleaseNotePath, tagReleaseNotePath)
-		if err != nil {
+		if err := os.Rename(nextReleaseNotePath, tagReleaseNotePath); err != nil {
 			return err
 		}
 
-		err = gitPkg.Add(recipe.Dir, path.Join("release", "next"), internal.Dry)
-		if err != nil {
+		if err := gitPkg.Add(recipe.Dir, path.Join("release", "next"), internal.Dry); err != nil {
 			return err
 		}
 
-		err = gitPkg.Add(recipe.Dir, path.Join("release", tag), internal.Dry)
-		if err != nil {
+		if err := gitPkg.Add(recipe.Dir, path.Join("release", tag), internal.Dry); err != nil {
 			return err
 		}
 	} else if !errors.Is(err, os.ErrNotExist) {
 		return err
 	}
 
-	// No release note exists for the current release.
-	if internal.NoInput {
+	// NOTE(d1): No release note exists for the current release. Or, we've
+	// already used release/next as the release note
+	if internal.NoInput || addNextAsReleaseNotes {
 		return nil
 	}
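Note: survey.Input collects a string, so binding it to a bool could never record a yes/no answer; survey.Confirm is the boolean prompt type. Hoisting addNextAsReleaseNotes also lets the final check skip the prompt once release/next has been consumed. A minimal standalone sketch of the corrected prompt:

    package main

    import (
        "fmt"

        "github.com/AlecAivazis/survey/v2"
    )

    func main() {
        confirmed := false
        prompt := &survey.Confirm{Message: "Use release note in release/next?"}
        if err := survey.AskOne(prompt, &confirmed); err != nil {
            panic(err)
        }
        fmt.Println("use release/next:", confirmed)
    }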

View File

@@ -51,6 +51,10 @@ func Run(version, commit string) {
 	log.Logger.SetStyles(charmLog.DefaultStyles())
 	charmLog.SetDefault(log.Logger)
 
+	if internal.MachineReadable {
+		log.SetOutput(os.Stderr)
+	}
+
 	if internal.Debug {
 		log.SetLevel(log.DebugLevel)
 		log.SetOutput(os.Stderr)
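Note: routing logs to stderr keeps stdout reserved for machine-readable output, so it can be piped into parsers without log noise mixed in. A minimal sketch of the split:

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    func main() {
        // diagnostics to stderr, data to stdout
        fmt.Fprintln(os.Stderr, "INFO: rendering machine-readable report")
        _ = json.NewEncoder(os.Stdout).Encode(map[string]string{"app": "example.com"})
    }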

go.mod
View File

@@ -17,10 +17,10 @@ require (
 	github.com/go-git/go-git/v5 v5.13.1
 	github.com/google/go-cmp v0.6.0
 	github.com/moby/sys/signal v0.7.1
-	github.com/moby/term v0.5.0
+	github.com/moby/term v0.5.2
 	github.com/pkg/errors v0.9.1
 	github.com/schollz/progressbar/v3 v3.17.1
-	golang.org/x/term v0.27.0
+	golang.org/x/term v0.28.0
 	gopkg.in/yaml.v3 v3.0.1
 	gotest.tools/v3 v3.5.1
 )
@@ -111,19 +111,19 @@ require (
 	go.opentelemetry.io/otel/sdk v1.33.0 // indirect
 	go.opentelemetry.io/otel/sdk/metric v1.33.0 // indirect
 	go.opentelemetry.io/otel/trace v1.33.0 // indirect
-	go.opentelemetry.io/proto/otlp v1.4.0 // indirect
-	golang.org/x/crypto v0.31.0 // indirect
-	golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 // indirect
+	go.opentelemetry.io/proto/otlp v1.5.0 // indirect
+	golang.org/x/crypto v0.32.0 // indirect
+	golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 // indirect
 	golang.org/x/mod v0.22.0 // indirect
-	golang.org/x/net v0.33.0 // indirect
+	golang.org/x/net v0.34.0 // indirect
 	golang.org/x/sync v0.10.0 // indirect
 	golang.org/x/text v0.21.0 // indirect
-	golang.org/x/time v0.8.0 // indirect
-	golang.org/x/tools v0.28.0 // indirect
-	google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d // indirect
+	golang.org/x/time v0.9.0 // indirect
+	golang.org/x/tools v0.29.0 // indirect
+	google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 // indirect
 	google.golang.org/grpc v1.69.2 // indirect
-	google.golang.org/protobuf v1.36.1 // indirect
+	google.golang.org/protobuf v1.36.2 // indirect
 	gopkg.in/warnings.v0 v0.1.2 // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 )
@@ -146,5 +146,5 @@ require (
 	github.com/stretchr/testify v1.10.0
 	github.com/theupdateframework/notary v0.7.0 // indirect
 	github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
-	golang.org/x/sys v0.28.0
+	golang.org/x/sys v0.29.0
 )

go.sum
View File

@@ -131,6 +131,7 @@ github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInq
 github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
 github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
@@ -519,6 +520,7 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de
 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
 github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
 github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
 github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 h1:VNqngBF40hVlDloBruUehVYC3ArSgIyScOAyMRqBxRg=
 github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1/go.mod h1:RBRO7fro65R6tjKzYgLAFo0t1QEXY1Dp+i/bvpRiqiQ=
@@ -673,6 +675,8 @@ github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcY
 github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo=
 github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
 github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
+github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
+github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -806,6 +810,7 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
 github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
+github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
 github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@@ -954,6 +959,8 @@ go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37Cb
 go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
 go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg=
 go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY=
+go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU/3i4=
+go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4=
 go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
 go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
 go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
@@ -980,6 +987,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
 golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
+golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -992,6 +1001,8 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
 golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 h1:9kj3STMvgqy3YA4VQXBrN7925ICMxD5wzMRcgA30588=
 golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c=
+golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
+golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
 golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -1059,6 +1070,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
 golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
+golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -1159,11 +1172,15 @@ golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
 golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
+golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
 golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
+golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1183,6 +1200,8 @@ golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxb
 golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
 golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
+golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY=
+golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -1230,6 +1249,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
 golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
+golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
+golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1280,8 +1301,12 @@ google.golang.org/genproto v0.0.0-20200527145253-8367513e4ece/go.mod h1:jDfRM7Fc
 google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
 google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d h1:H8tOf8XM88HvKqLTxe755haY6r1fqqzLbEnfrmLXlSA=
 google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d/go.mod h1:2v7Z7gP2ZUOGsaFyxATQSRoBnKygqVq2Cwnvom7QiqY=
+google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24=
+google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d h1:xJJRGY7TJcvIlpSrN3K6LAWgNFUILlO+OMAqtg9aqnw=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 h1:3UsHvIr4Wc2aW4brOaSCmcxh9ksica6fHEr8P1XhkYw=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
 google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
@@ -1318,6 +1343,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
 google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
 google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
 google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
+google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU=
+google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
 gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII=
@@ -1357,6 +1384,7 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
 gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
 gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
 gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=

View File

@@ -192,7 +192,7 @@ func TestEnvVarCommentsRemoved(t *testing.T) {
 	envVar, exists = envSample["SECRET_TEST_PASS_TWO_VERSION"]
 	if !exists {
-		t.Fatal("WITH_COMMENT env var should be present in .env.sample")
+		t.Fatal("SECRET_TEST_PASS_TWO_VERSION env var should be present in .env.sample")
 	}
 
 	if strings.Contains(envVar, "length") {

View File

@@ -50,6 +50,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -62,6 +65,9 @@ teardown(){
   run $ABRA app check "$TEST_APP_DOMAIN" --chaos
   assert_success
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }

View File

@@ -53,6 +53,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -66,6 +69,9 @@ teardown(){
   assert_success
   assert_output --partial 'baz'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }

View File

@@ -24,6 +24,9 @@ teardown(){
   _rm_remote "/etc/*.txt"
   _rm "$BATS_TMPDIR/mydir"
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
 
 @test "validate app argument" {
@@ -34,6 +37,42 @@ teardown(){
   assert_failure
 }
 
+@test "bail if unstaged changes and no --chaos" {
+  _mkdir "$BATS_TMPDIR/mydir"
+  _mkfile "$BATS_TMPDIR/mydir/myfile.txt" "foo"
+
+  run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_success
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+
+  run $ABRA app cp "$TEST_APP_DOMAIN" "$BATS_TMPDIR/mydir" app:/etc
+  assert_failure
+
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+}
+
+@test "do not bail if unstaged changes and --chaos" {
+  _mkdir "$BATS_TMPDIR/mydir"
+  _mkfile "$BATS_TMPDIR/mydir/myfile.txt" "foo"
+
+  run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_success
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+
+  run $ABRA app cp "$TEST_APP_DOMAIN" "$BATS_TMPDIR/mydir" app:/etc --chaos
+  assert_success
+
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+}
+
 @test "error if missing src/dest arguments" {
   run $ABRA app cp "$TEST_APP_DOMAIN"
   assert_failure

View File

@@ -21,8 +21,8 @@ setup(){
 teardown(){
   _reset_recipe
-  _reset_app
   _undeploy_app
+  _reset_app
   _reset_tags
 
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
@@ -46,6 +46,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA app deploy "$TEST_APP_DOMAIN" --no-input
   assert_failure
   assert_output --partial 'locally unstaged changes'
@@ -62,6 +65,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA app deploy "$TEST_APP_DOMAIN" \
     --chaos --no-input --no-converge-checks
   assert_success
@@ -423,3 +429,12 @@ teardown(){
   assert_success
   assert_output --partial "$latestRelease"
 }
+
+# bats test_tags=slow
+@test "no chaos version label if no chaos" {
+  _deploy_app
+
+  run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
+  assert_success
+  refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
+}

View File

@@ -20,8 +20,8 @@ setup(){
 teardown(){
   _reset_recipe
-  _reset_app
   _undeploy_app
+  _reset_app
   _reset_tags
 
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"

View File

@@ -20,8 +20,11 @@ setup(){
 teardown(){
   _reset_recipe
-  _reset_app
   _undeploy_app
+  _reset_app
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
 
 @test "validate app argument" {
@@ -41,6 +44,16 @@ teardown(){
 }
 
 @test "show env version despite --chaos" {
+  run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_success
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+
   run $ABRA app env "$TEST_APP_DOMAIN"
   assert_success
+
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }

View File

@@ -21,8 +21,8 @@ setup(){
 teardown(){
   _reset_recipe
-  _reset_app
   _undeploy_app
+  _reset_app
   _reset_tags
 
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
@@ -46,6 +46,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
   assert_failure
 }
@@ -59,6 +62,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA app labels "$TEST_APP_DOMAIN" --chaos
   assert_success
 }

View File

@@ -20,6 +20,10 @@ setup(){
 teardown(){
   _rm_app
   _reset_recipe
+  _reset_tags
+
+  run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
 
 @test "create new app" {
@@ -47,25 +51,22 @@ teardown(){
   assert_success
   assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
 
-  assert_equal $(_get_tag_hash 0.3.0+1.21.0) $(_get_current_hash)
-
   run grep -q "TYPE=$TEST_RECIPE:0.3.0+1.21.0" \
     "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
   assert_success
 }
 
-@test "create new app with chaos commit" {
-  run $ABRA app new "$TEST_RECIPE" 1e83340e \
+@test "create new app with version commit" {
+  tagHash=$(_get_tag_hash "0.3.0+1.21.0")
+
+  run $ABRA app new "$TEST_RECIPE" "$tagHash" \
     --no-input \
     --server "$TEST_SERVER" \
     --domain "$TEST_APP_DOMAIN"
   assert_success
   assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
 
-  currentHash=$(_get_current_hash)
-  assert_equal 1e83340e ${currentHash:0:8}
-
-  run grep -q "TYPE=$TEST_RECIPE:1e83340e" \
+  run grep -q "TYPE=$TEST_RECIPE:${tagHash}" \
    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
   assert_success
 }
@@ -101,6 +102,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -122,6 +126,13 @@ teardown(){
   assert_success
   assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" status
+  assert_success
+  assert_output --partial 'foo'
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -167,6 +178,8 @@ teardown(){
 # bats test_tags=slow
 @test "generate secrets" {
+  latestRelease=$(_latest_release)
+
   run $ABRA app new "$TEST_RECIPE" \
     --no-input \
     --server "$TEST_SERVER" \
@@ -178,4 +191,64 @@ teardown(){
   run $ABRA app secret ls "$TEST_APP_DOMAIN"
   assert_success
   assert_output --partial 'test_pass_one'
+
+  run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
+    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_success
+}
+
+# bats test_tags=slow
+@test "app new from chaos recipe" {
+  currentHash=$(_get_current_hash)
+  latestRelease=$(_latest_release)
+
+  run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_success
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+
+  run $ABRA app new "$TEST_RECIPE" \
+    --no-input \
+    --server "$TEST_SERVER" \
+    --domain "$TEST_APP_DOMAIN" \
+    --secrets \
+    --chaos
+  assert_success
+  assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_output --partial "version: ${currentHash:0:8}"
+  assert_output --partial "chaos: ${currentHash:0:8}"
+
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run grep -q "TYPE=$TEST_RECIPE:${currentHash:0:8}+U" \
+    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_success
+}
+
+# bats test_tags=slow
+@test "app new, no releases, from chaos recipe" {
+  currentHash=$(_get_current_hash)
+
+  _remove_tags
+
+  run bash -c "echo foo >> $ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_success
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+
+  run $ABRA app new "$TEST_RECIPE" \
+    --no-input \
+    --server "$TEST_SERVER" \
+    --domain "$TEST_APP_DOMAIN" \
+    --secrets \
+    --chaos
+  assert_success
+  assert_exists "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_output --partial "version: ${currentHash:0:8}"
+  assert_output --partial "chaos: ${currentHash:0:8}"
+
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
+  run grep -q "TYPE=$TEST_RECIPE:${currentHash:0:8}+U" \
+    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_success
 }

View File

@@ -55,6 +55,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -70,6 +73,9 @@ teardown(){
   run $ABRA app ps --chaos "$TEST_APP_DOMAIN"
   assert_success
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -117,6 +123,8 @@ teardown(){
 @test "show ps report" {
   _deploy_app
 
+  _ensure_env_version "$(_latest_release)"
+
   run $ABRA app ps "$TEST_APP_DOMAIN"
   assert_success
   assert_output --partial 'app'

View File

@@ -19,6 +19,7 @@ setup(){
 }
 
 teardown(){
+  _reset_app
   _undeploy_app
   _reset_recipe
 }
@@ -185,3 +186,16 @@ teardown(){
   assert_failure
   assert_output --partial "not a known version"
 }
+
+# bats test_tags=slow
+@test "no chaos version label if no chaos" {
+  _deploy_app
+
+  run $ABRA app rollback "$TEST_APP_DOMAIN" \
+    --no-input --no-converge-checks
+  assert_success
+
+  run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
+  assert_success
+  refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
+}

View File

@@ -20,6 +20,7 @@ setup(){
 teardown(){
   _undeploy_app
+  _reset_app
   _reset_recipe
 }
@@ -36,9 +37,8 @@ teardown(){
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
   assert_output --regexp 'CHAOS.*false'
 
-  # new deployment
+  # rollback
   assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
-  assert_output --regexp 'CHAOS.*false'
 
   # env version
   assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
@@ -62,9 +62,8 @@ teardown(){
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
   assert_output --regexp 'CHAOS.*false'
 
-  # new deployment
+  # rollback
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
-  assert_output --regexp 'CHAOS.*false'
 
   # env version
   assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
@@ -74,3 +73,30 @@ teardown(){
     "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
   assert_success
 }
+
+@test "app rollback no .env version" {
+  run $ABRA app deploy "$TEST_APP_DOMAIN" "0.2.0+1.21.0" \
+    --no-input --no-converge-checks
+  assert_success
+
+  _wipe_env_version
+
+  run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.0+1.20.0" \
+    --no-input --no-converge-checks
+  assert_success
+
+  # current deployment
+  assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
+  assert_output --regexp 'CHAOS.*false'
+
+  # rollback
+  assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
+
+  # env version
+  assert_output --regexp 'CURRENT VERSION.*N/A'
+  assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
+
+  run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
+    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_success
+}

View File

@@ -131,6 +131,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -271,6 +274,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }
@@ -319,6 +325,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'
 
+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }

View File

@@ -239,3 +239,16 @@ teardown(){
   assert_failure
   assert_output --partial "not a known version"
 }
+
+# bats test_tags=slow
+@test "no chaos version label if no chaos" {
+  _deploy_app
+
+  run $ABRA app upgrade "$TEST_APP_DOMAIN" \
+    --no-input --no-converge-checks
+  assert_success
+
+  run $ABRA app labels "$TEST_APP_DOMAIN" --no-input
+  assert_success
+  refute_output --regexp "coop-cloud.abra-test-recipe.$TEST_SERVER.chaos-version"
+}

View File

@@ -35,7 +35,7 @@ teardown(){
   assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
   assert_output --regexp 'CHAOS.*false'
 
-  # new deployment
+  # upgrade
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
   assert_output --regexp 'CHAOS.*false'
@@ -61,7 +61,7 @@ teardown(){
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
   assert_output --regexp 'CHAOS.*false'
 
-  # new deployment
+  # upgrade
   assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
   assert_output --regexp 'CHAOS.*false'
@@ -73,3 +73,33 @@ teardown(){
     "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
   assert_success
 }
+
+@test "app upgrade no .env version" {
+  latestRelease=$(_latest_release)
+
+  run $ABRA app deploy "$TEST_APP_DOMAIN" "0.2.0+1.21.0" \
+    --no-input --no-converge-checks
+  assert_success
+
+  _wipe_env_version
+
+  run $ABRA app upgrade "$TEST_APP_DOMAIN" \
+    --no-input --no-converge-checks --force
+  assert_success
+
+  # current deployment
+  assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
+  assert_output --regexp 'CHAOS.*false'
+
+  # upgrade
+  assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
+  assert_output --regexp 'CHAOS.*false'
+
+  # env version
+  assert_output --regexp 'CURRENT VERSION.*N/A'
+  assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
+
+  run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
+    "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
+  assert_success
+}


@@ -60,3 +60,7 @@ _get_current_hash() {
 _get_n_hash() {
   echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" show -s --format="%H" "HEAD~$1")
 }
+
+_git_status() {
+  echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" status --porcelain)
+}
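
Because the helper echoes $(git ... status --porcelain) unquoted, multi-line porcelain output is collapsed into a single whitespace-separated string, which is why the tests can compare it against values like "?? foo". A Go sketch of the same idea, shelling out to git (the git invocation is the only assumption):

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

// porcelainStatus returns `git status --porcelain` for dir, collapsed onto
// one line the way the bats helper's unquoted echo $(...) does.
func porcelainStatus(dir string) (string, error) {
	out, err := exec.Command("git", "-C", dir, "status", "--porcelain").Output()
	if err != nil {
		return "", err
	}
	return strings.Join(strings.Fields(string(out)), " "), nil
}

func main() {
	s, err := porcelainStatus(".")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("status: %q\n", s) // e.g. "?? foo" for a single untracked file
}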


@@ -1,7 +1,7 @@
 #!/usr/bin/env bash

 _latest_release(){
-  echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag -l | tail -n 1)
+  echo $(git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag -l --sort=v:refname | tail -n 1)
 }

 _fetch_recipe() {

@@ -22,15 +22,6 @@ _reset_recipe(){
   _fetch_recipe
 }

-_ensure_latest_version(){
-  latestRelease=$(_latest_release)
-  if [ ! $latestRelease = "$1" ]; then
-    echo "expected latest recipe version of '$1', saw: $latestRelease"
-    return 1
-  fi
-}
-
 _ensure_catalogue(){
   if [[ ! -d "$ABRA_DIR/catalogue" ]]; then
     run git clone https://git.coopcloud.tech/toolshed/recipes-catalogue-json.git $ABRA_DIR/catalogue
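
The `--sort=v:refname` fix matters because plain `git tag -l` sorts lexically, so "0.10.0" sorts before "0.2.0" and `tail -n 1` can pick the wrong "latest" tag. A small Go illustration of the two orderings, assuming golang.org/x/mod/semver and a leading "v" added for comparison; the tag values are made up:

package main

import (
	"fmt"
	"sort"

	"golang.org/x/mod/semver"
)

func main() {
	tags := []string{"0.10.0+2.0.0", "0.2.0+1.21.0", "0.9.1+1.22.0"}

	// Lexical order: "0.10.0..." sorts before "0.2.0...", so a plain
	// `git tag -l | tail -n 1` would report 0.9.1 as the latest release.
	sort.Strings(tags)
	fmt.Println(tags)

	// Version-aware order, like git's --sort=v:refname. x/mod/semver
	// wants a leading "v" and ignores the "+..." build metadata.
	sort.Slice(tags, func(i, j int) bool {
		return semver.Compare("v"+tags[i], "v"+tags[j]) < 0
	})
	fmt.Println(tags) // 0.10.0+2.0.0 is now last, i.e. the latest
}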


@@ -28,8 +28,6 @@ teardown(){
 # bats test_tags=slow
 @test "install release candidate from script" {
-  skip "current RC is brokenly specified in the installer script"
-
   run bash -c 'curl https://install.abra.coopcloud.tech | bash -s -- --rc'
   assert_success


@@ -41,6 +41,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'

+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA recipe lint "$TEST_RECIPE"
   assert_failure
   assert_output --partial 'locally unstaged changes'

@@ -58,6 +61,9 @@ teardown(){
   assert_success
   assert_output --partial 'foo'

+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run $ABRA recipe lint "$TEST_RECIPE" --chaos
   assert_success

View File

@@ -20,6 +20,7 @@ setup(){
 teardown() {
   _reset_recipe
+  _reset_tags
 }

 @test "validate recipe argument" {

@@ -31,8 +32,6 @@ teardown() {
 }

 @test "release patch bump" {
-  _ensure_latest_version "0.3.0+1.21.0"
-
   run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --patch
   assert_success

@@ -40,6 +39,12 @@ teardown() {
   assert_success
   assert_output --partial 'image: nginx:1.21.6'

+  # NOTE(d1): ensure the latest tag is the one we expect
+  _remove_tags
+  run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
+    -a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
+  assert_success
+
   run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
   assert_success
   assert_output --partial 'synced label'

@@ -58,8 +63,6 @@ teardown() {
 }

 @test "release minor bump" {
-  _ensure_latest_version "0.3.0+1.21.0"
-
   run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --minor
   assert_success

@@ -67,6 +70,12 @@ teardown() {
   assert_success
   assert_output --regexp 'image: nginx:1.2.*'

+  # NOTE(d1): ensure the latest tag is the one we expect
+  _remove_tags
+  run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
+    -a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
+  assert_success
+
   run $ABRA recipe sync "$TEST_RECIPE" --no-input --minor
   assert_success
   assert_output --partial 'synced label'

@@ -102,8 +111,6 @@ teardown() {
 }

 @test "release with next release note" {
-  _ensure_latest_version "0.3.0+1.21.0"
-
   _mkfile "$ABRA_DIR/recipes/$TEST_RECIPE/release/next" "those are some release notes for the next release"

   run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" add release/next


@@ -40,6 +40,9 @@ teardown(){
   run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
   assert_success

+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "M compose.yml ?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_success
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"

@@ -58,8 +61,6 @@ teardown(){
 }

 @test "sync patch label bump" {
-  _ensure_latest_version "0.3.0+1.21.0"
-
   run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --patch
   assert_success

@@ -67,6 +68,12 @@ teardown(){
   assert_success
   assert_output --partial 'image: nginx:1.21.6'

+  # NOTE(d1): ensure the latest tag is the one we expect
+  _remove_tags
+  run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
+    -a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
+  assert_success
+
   run $ABRA recipe sync "$TEST_RECIPE" --no-input --patch
   assert_success

@@ -76,8 +83,6 @@ teardown(){
 }

 @test "sync minor label bump" {
-  _ensure_latest_version "0.3.0+1.21.0"
-
   run $ABRA recipe upgrade "$TEST_RECIPE" --no-input --minor
   assert_success

@@ -85,6 +90,12 @@ teardown(){
   assert_success
   assert_output --regexp 'image: nginx:1.2.*'

+  # NOTE(d1): ensure the latest tag is the one we expect
+  _remove_tags
+  run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" tag \
+    -a "0.3.0+1.21.0" -m "fake: 0.3.0+1.21.0"
+  assert_success
+
   run $ABRA recipe sync "$TEST_RECIPE" --no-input --minor
   assert_success


@@ -54,6 +54,9 @@ teardown(){
   assert_failure
   assert_output --partial 'locally unstaged changes'

+  assert_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
+  assert_equal "$(_git_status)" "?? foo"
+
   run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
   assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
 }


@@ -29,8 +29,6 @@ teardown(){
 # bats test_tags=slow
 @test "abra upgrade release candidate" {
-  skip "TODO: RC publishing broke somehow, needs investigation"
-
   run $ABRA upgrade --rc
   assert_success
   assert_output --partial 'Public interest infrastructure'


@@ -81,7 +81,7 @@ func setRawTerminal(fd uintptr) (*State, error) {
 	return makeRaw(fd)
 }

-func setRawTerminalOutput(fd uintptr) (*State, error) {
+func setRawTerminalOutput(uintptr) (*State, error) {
 	return nil, nil
 }


@@ -298,7 +298,8 @@ type ResourceMetrics struct {
 	// A list of metrics that originate from a resource.
 	ScopeMetrics []*ScopeMetrics `protobuf:"bytes,2,rep,name=scope_metrics,json=scopeMetrics,proto3" json:"scope_metrics,omitempty"`
 	// The Schema URL, if known. This is the identifier of the Schema that the resource data
-	// is recorded in. To learn more about Schema URL see
+	// is recorded in. Notably, the last part of the URL path is the version number of the
+	// schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
 	// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
 	// This schema_url applies to the data in the "resource" field. It does not apply
 	// to the data in the "scope_metrics" field which have their own schema_url field.

@@ -371,7 +372,8 @@ type ScopeMetrics struct {
 	// A list of metrics that originate from an instrumentation library.
 	Metrics []*Metric `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"`
 	// The Schema URL, if known. This is the identifier of the Schema that the metric data
-	// is recorded in. To learn more about Schema URL see
+	// is recorded in. Notably, the last part of the URL path is the version number of the
+	// schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
 	// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
 	// This schema_url applies to all metrics in the "metrics" field.
 	SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`

@@ -1165,7 +1167,7 @@ type HistogramDataPoint struct {
 	// events, and is assumed to be monotonic over the values of these events.
 	// Negative events *can* be recorded, but sum should not be filled out when
 	// doing so. This is specifically to enforce compatibility w/ OpenMetrics,
-	// see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram
+	// see: https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#histogram
 	Sum *float64 `protobuf:"fixed64,5,opt,name=sum,proto3,oneof" json:"sum,omitempty"`
 	// bucket_counts is an optional field contains the count values of histogram
 	// for each bucket.

@@ -1347,7 +1349,7 @@ type ExponentialHistogramDataPoint struct {
 	// events, and is assumed to be monotonic over the values of these events.
 	// Negative events *can* be recorded, but sum should not be filled out when
 	// doing so. This is specifically to enforce compatibility w/ OpenMetrics,
-	// see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram
+	// see: https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#histogram
 	Sum *float64 `protobuf:"fixed64,5,opt,name=sum,proto3,oneof" json:"sum,omitempty"`
 	// scale describes the resolution of the histogram. Boundaries are
 	// located at powers of the base, where:

@@ -1560,7 +1562,7 @@ type SummaryDataPoint struct {
 	// events, and is assumed to be monotonic over the values of these events.
 	// Negative events *can* be recorded, but sum should not be filled out when
 	// doing so. This is specifically to enforce compatibility w/ OpenMetrics,
-	// see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#summary
+	// see: https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#summary
 	Sum float64 `protobuf:"fixed64,5,opt,name=sum,proto3" json:"sum,omitempty"`
 	// (Optional) list of values at different quantiles of the distribution calculated
 	// from the current snapshot. The quantiles must be strictly increasing.
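
A tiny, hedged illustration of the schema URL convention the amended comments describe; the module path is the real go.opentelemetry.io/proto/otlp package, but the version in the URL is only an example:

package main

import (
	"fmt"

	metricspb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

func main() {
	// Per the comment: the final path segment is the schema version.
	sm := &metricspb.ScopeMetrics{
		SchemaUrl: "https://opentelemetry.io/schemas/1.24.0", // example version only
	}
	fmt.Println(sm.GetSchemaUrl())
}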


@@ -311,7 +311,8 @@ type ResourceSpans struct {
 	// A list of ScopeSpans that originate from a resource.
 	ScopeSpans []*ScopeSpans `protobuf:"bytes,2,rep,name=scope_spans,json=scopeSpans,proto3" json:"scope_spans,omitempty"`
 	// The Schema URL, if known. This is the identifier of the Schema that the resource data
-	// is recorded in. To learn more about Schema URL see
+	// is recorded in. Notably, the last part of the URL path is the version number of the
+	// schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
 	// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
 	// This schema_url applies to the data in the "resource" field. It does not apply
 	// to the data in the "scope_spans" field which have their own schema_url field.

@@ -384,7 +385,8 @@ type ScopeSpans struct {
 	// A list of Spans that originate from an instrumentation scope.
 	Spans []*Span `protobuf:"bytes,2,rep,name=spans,proto3" json:"spans,omitempty"`
 	// The Schema URL, if known. This is the identifier of the Schema that the span data
-	// is recorded in. To learn more about Schema URL see
+	// is recorded in. Notably, the last part of the URL path is the version number of the
+	// schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
 	// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
 	// This schema_url applies to all spans and span events in the "spans" field.
 	SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`


@@ -60,7 +60,7 @@ func configFromServer(h1 *http.Server, h2 *Server) http2Config {
 	return conf
 }

-// configFromServer merges configuration settings from h2 and h2.t1.HTTP2
+// configFromTransport merges configuration settings from h2 and h2.t1.HTTP2
 // (the net/http Transport).
 func configFromTransport(h2 *Transport) http2Config {
 	conf := http2Config{


@@ -13,7 +13,7 @@ func fillNetHTTPServerConfig(conf *http2Config, srv *http.Server) {
 	fillNetHTTPConfig(conf, srv.HTTP2)
 }

-// fillNetHTTPServerConfig sets fields in conf from tr.HTTP2.
+// fillNetHTTPTransportConfig sets fields in conf from tr.HTTP2.
 func fillNetHTTPTransportConfig(conf *http2Config, tr *http.Transport) {
 	fillNetHTTPConfig(conf, tr.HTTP2)
 }


@@ -375,6 +375,7 @@ type ClientConn struct {
 	doNotReuse       bool // whether conn is marked to not be reused for any future requests
 	closing          bool
 	closed           bool
+	closedOnIdle     bool // true if conn was closed for idleness
 	seenSettings     bool // true if we've seen a settings frame, false otherwise
 	seenSettingsChan chan struct{} // closed when seenSettings is true or frame reading fails
 	wantSettingsAck  bool // we sent a SETTINGS frame and haven't heard back

@@ -1089,10 +1090,12 @@ func (cc *ClientConn) idleStateLocked() (st clientConnIdleState) {
 	// If this connection has never been used for a request and is closed,
 	// then let it take a request (which will fail).
+	// If the conn was closed for idleness, we're racing the idle timer;
+	// don't try to use the conn. (Issue #70515.)
 	//
 	// This avoids a situation where an error early in a connection's lifetime
 	// goes unreported.
-	if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed {
+	if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed && !cc.closedOnIdle {
 		st.canTakeNewRequest = true
 	}

@@ -1155,6 +1158,7 @@ func (cc *ClientConn) closeIfIdle() {
 		return
 	}
 	cc.closed = true
+	cc.closedOnIdle = true
 	nextID := cc.nextStreamID
 	// TODO: do clients send GOAWAY too? maybe? Just Close:
 	cc.mu.Unlock()

@@ -2434,9 +2438,12 @@ func (rl *clientConnReadLoop) cleanup() {
 	// This avoids a situation where new connections are constantly created,
 	// added to the pool, fail, and are removed from the pool, without any error
 	// being surfaced to the user.
-	const unusedWaitTime = 5 * time.Second
+	unusedWaitTime := 5 * time.Second
+	if cc.idleTimeout > 0 && unusedWaitTime > cc.idleTimeout {
+		unusedWaitTime = cc.idleTimeout
+	}
 	idleTime := cc.t.now().Sub(cc.lastActive)
-	if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime {
+	if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime && !cc.closedOnIdle {
 		cc.idleTimer = cc.t.afterFunc(unusedWaitTime-idleTime, func() {
 			cc.t.connPool().MarkDead(cc)
 		})
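
A small, runnable sketch of the wait-time clamping introduced above, extracted into a standalone function for illustration (the function name is ours, not x/net's):

package main

import (
	"fmt"
	"time"
)

// effectiveUnusedWait mirrors the clamp above: never wait longer than the
// transport's idle timeout before deciding an unused connection is dead,
// otherwise the 5s timer races the idle timer that already closed the conn.
func effectiveUnusedWait(idleTimeout time.Duration) time.Duration {
	wait := 5 * time.Second
	if idleTimeout > 0 && wait > idleTimeout {
		wait = idleTimeout
	}
	return wait
}

func main() {
	fmt.Println(effectiveUnusedWait(0))               // 5s: no idle timeout configured
	fmt.Println(effectiveUnusedWait(2 * time.Second)) // 2s: clamped to the idle timeout
}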


@@ -246,6 +246,18 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e
 	return sendfile(outfd, infd, offset, count)
 }

+func Dup3(oldfd, newfd, flags int) error {
+	if oldfd == newfd || flags&^O_CLOEXEC != 0 {
+		return EINVAL
+	}
+	how := F_DUP2FD
+	if flags&O_CLOEXEC != 0 {
+		how = F_DUP2FD_CLOEXEC
+	}
+	_, err := fcntl(oldfd, how, newfd)
+	return err
+}
+
 /*
  * Exposed directly
  */
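
A hedged usage sketch for the Dup3 shim added above: redirect stderr to a log file with close-on-exec semantics. The file path is an example; unix.Dup3 has the same shape on other platforms as well.

//go:build solaris

package main

import (
	"log"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	f, err := os.OpenFile("/tmp/app.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644)
	if err != nil {
		log.Fatal(err)
	}

	// Duplicate the log file's descriptor onto stderr (fd 2);
	// O_CLOEXEC selects F_DUP2FD_CLOEXEC in the shim above.
	if err := unix.Dup3(int(f.Fd()), 2, unix.O_CLOEXEC); err != nil {
		log.Fatal(err)
	}

	os.Stderr.WriteString("this line lands in /tmp/app.log\n")
}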


@@ -43,8 +43,8 @@ type DLL struct {
 // LoadDLL loads DLL file into memory.
 //
 // Warning: using LoadDLL without an absolute path name is subject to
-// DLL preloading attacks. To safely load a system DLL, use LazyDLL
-// with System set to true, or use LoadLibraryEx directly.
+// DLL preloading attacks. To safely load a system DLL, use [NewLazySystemDLL],
+// or use [LoadLibraryEx] directly.
 func LoadDLL(name string) (dll *DLL, err error) {
 	namep, err := UTF16PtrFromString(name)
 	if err != nil {

@@ -271,6 +271,9 @@ func (d *LazyDLL) NewProc(name string) *LazyProc {
 }

 // NewLazyDLL creates new LazyDLL associated with DLL file.
+//
+// Warning: using NewLazyDLL without an absolute path name is subject to
+// DLL preloading attacks. To safely load a system DLL, use [NewLazySystemDLL].
 func NewLazyDLL(name string) *LazyDLL {
 	return &LazyDLL{Name: name}
 }

@@ -410,7 +413,3 @@ func loadLibraryEx(name string, system bool) (*DLL, error) {
 	}
 	return &DLL{Name: name, Handle: h}, nil
 }
-
-type errString string
-
-func (s errString) Error() string { return string(s) }
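
Following the amended warnings, a short example of the recommended [NewLazySystemDLL] pattern; kernel32's GetTickCount64 is used only as a harmless demonstration call:

//go:build windows

package main

import (
	"fmt"

	"golang.org/x/sys/windows"
)

func main() {
	// NewLazySystemDLL only searches the Windows system directory,
	// sidestepping the DLL preloading attacks the warnings describe.
	kernel32 := windows.NewLazySystemDLL("kernel32.dll")
	getTickCount64 := kernel32.NewProc("GetTickCount64")

	ms, _, _ := getTickCount64.Call() // uptime in milliseconds
	fmt.Printf("uptime: %d ms\n", ms)
}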


@@ -322,6 +322,7 @@ type jsonPackage struct {
 	ImportPath      string
 	Dir             string
 	Name            string
+	Target          string
 	Export          string
 	GoFiles         []string
 	CompiledGoFiles []string

@@ -506,6 +507,7 @@ func (state *golistState) createDriverResponse(words ...string) (*DriverResponse
 			Name:            p.Name,
 			ID:              p.ImportPath,
 			Dir:             p.Dir,
+			Target:          p.Target,
 			GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles),
 			CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
 			OtherFiles:      absJoin(p.Dir, otherFiles(p)...),

@@ -811,6 +813,9 @@ func jsonFlag(cfg *Config, goVersion int) string {
 	if cfg.Mode&NeedEmbedPatterns != 0 {
 		addFields("EmbedPatterns")
 	}
+	if cfg.Mode&NeedTarget != 0 {
+		addFields("Target")
+	}
 	return "-json=" + strings.Join(fields, ",")
 }


@@ -27,6 +27,7 @@ var modes = [...]struct {
 	{NeedModule, "NeedModule"},
 	{NeedEmbedFiles, "NeedEmbedFiles"},
 	{NeedEmbedPatterns, "NeedEmbedPatterns"},
+	{NeedTarget, "NeedTarget"},
 }

 func (mode LoadMode) String() string {


@@ -118,6 +118,9 @@ const (
 	// NeedEmbedPatterns adds EmbedPatterns.
 	NeedEmbedPatterns

+	// NeedTarget adds Target.
+	NeedTarget
+
 	// Be sure to update loadmode_string.go when adding new items!
 )

@@ -479,6 +482,10 @@ type Package struct {
 	// information for the package as provided by the build system.
 	ExportFile string

+	// Target is the absolute install path of the .a file, for libraries,
+	// and of the executable file, for binaries.
+	Target string
+
 	// Imports maps import paths appearing in the package's Go source files
 	// to corresponding loaded Packages.
 	Imports map[string]*Package
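
With the new NeedTarget bit and Target field, callers can ask go list for install paths. A minimal sketch, assuming only the x/tools go/packages API shown above:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// NeedTarget requests the new Target field via `go list -json=...,Target`.
		Mode: packages.NeedName | packages.NeedTarget,
	}
	pkgs, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range pkgs {
		fmt.Println(p.Name, p.Target) // install path; may be empty if not installed
	}
}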


@@ -2,30 +2,35 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

-// Package typeutil defines various utilities for types, such as Map,
-// a mapping from types.Type to any values.
-package typeutil // import "golang.org/x/tools/go/types/typeutil"
+// Package typeutil defines various utilities for types, such as [Map],
+// a hash table that maps [types.Type] to any value.
+package typeutil

 import (
 	"bytes"
 	"fmt"
 	"go/types"
-	"reflect"
+	"hash/maphash"
+	"unsafe"

 	"golang.org/x/tools/internal/typeparams"
 )

 // Map is a hash-table-based mapping from types (types.Type) to
-// arbitrary any values. The concrete types that implement
+// arbitrary values. The concrete types that implement
 // the Type interface are pointers. Since they are not canonicalized,
 // == cannot be used to check for equivalence, and thus we cannot
 // simply use a Go map.
 //
 // Just as with map[K]V, a nil *Map is a valid empty map.
 //
-// Not thread-safe.
+// Read-only map operations ([Map.At], [Map.Len], and so on) may
+// safely be called concurrently.
+//
+// TODO(adonovan): deprecate in favor of https://go.dev/issues/69420
+// and 69559, if the latter proposals for a generic hash-map type and
+// a types.Hash function are accepted.
 type Map struct {
-	hasher Hasher             // shared by many Maps
 	table  map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
 	length int                // number of map entries
 }

@@ -36,35 +41,17 @@ type entry struct {
 	value any
 }

-// SetHasher sets the hasher used by Map.
-//
-// All Hashers are functionally equivalent but contain internal state
-// used to cache the results of hashing previously seen types.
-//
-// A single Hasher created by MakeHasher() may be shared among many
-// Maps. This is recommended if the instances have many keys in
-// common, as it will amortize the cost of hash computation.
-//
-// A Hasher may grow without bound as new types are seen. Even when a
-// type is deleted from the map, the Hasher never shrinks, since other
-// types in the map may reference the deleted type indirectly.
-//
-// Hashers are not thread-safe, and read-only operations such as
-// Map.Lookup require updates to the hasher, so a full Mutex lock (not a
-// read-lock) is require around all Map operations if a shared
-// hasher is accessed from multiple threads.
-//
-// If SetHasher is not called, the Map will create a private hasher at
-// the first call to Insert.
-func (m *Map) SetHasher(hasher Hasher) {
-	m.hasher = hasher
-}
+// SetHasher has no effect.
+//
+// It is a relic of an optimization that is no longer profitable. Do
+// not use [Hasher], [MakeHasher], or [SetHasher] in new code.
+func (m *Map) SetHasher(Hasher) {}

 // Delete removes the entry with the given key, if any.
 // It returns true if the entry was found.
 func (m *Map) Delete(key types.Type) bool {
 	if m != nil && m.table != nil {
-		hash := m.hasher.Hash(key)
+		hash := hash(key)
 		bucket := m.table[hash]
 		for i, e := range bucket {
 			if e.key != nil && types.Identical(key, e.key) {

@@ -83,7 +70,7 @@ func (m *Map) Delete(key types.Type) bool {
 // The result is nil if the entry is not present.
 func (m *Map) At(key types.Type) any {
 	if m != nil && m.table != nil {
-		for _, e := range m.table[m.hasher.Hash(key)] {
+		for _, e := range m.table[hash(key)] {
 			if e.key != nil && types.Identical(key, e.key) {
 				return e.value
 			}

@@ -96,7 +83,7 @@ func (m *Map) At(key types.Type) any {
 // and returns the previous entry, if any.
 func (m *Map) Set(key types.Type, value any) (prev any) {
 	if m.table != nil {
-		hash := m.hasher.Hash(key)
+		hash := hash(key)
 		bucket := m.table[hash]
 		var hole *entry
 		for i, e := range bucket {

@@ -115,10 +102,7 @@ func (m *Map) Set(key types.Type, value any) (prev any) {
 			m.table[hash] = append(bucket, entry{key, value})
 		}
 	} else {
-		if m.hasher.memo == nil {
-			m.hasher = MakeHasher()
-		}
-		hash := m.hasher.Hash(key)
+		hash := hash(key)
 		m.table = map[uint32][]entry{hash: {entry{key, value}}}
 	}

@@ -195,53 +179,35 @@ func (m *Map) KeysString() string {
 	return m.toString(false)
 }

-////////////////////////////////////////////////////////////////////////
-// Hasher
-
-// A Hasher maps each type to its hash value.
-// For efficiency, a hasher uses memoization; thus its memory
-// footprint grows monotonically over time.
-// Hashers are not thread-safe.
-// Hashers have reference semantics.
-// Call MakeHasher to create a Hasher.
-type Hasher struct {
-	memo map[types.Type]uint32
-
-	// ptrMap records pointer identity.
-	ptrMap map[any]uint32
-
-	// sigTParams holds type parameters from the signature being hashed.
-	// Signatures are considered identical modulo renaming of type parameters, so
-	// within the scope of a signature type the identity of the signature's type
-	// parameters is just their index.
-	//
-	// Since the language does not currently support referring to uninstantiated
-	// generic types or functions, and instantiated signatures do not have type
-	// parameter lists, we should never encounter a second non-empty type
-	// parameter list when hashing a generic signature.
-	sigTParams *types.TypeParamList
-}
-
-// MakeHasher returns a new Hasher instance.
-func MakeHasher() Hasher {
-	return Hasher{
-		memo:       make(map[types.Type]uint32),
-		ptrMap:     make(map[any]uint32),
-		sigTParams: nil,
-	}
-}
+// -- Hasher --
+
+// hash returns the hash of type t.
+// TODO(adonovan): replace by types.Hash when Go proposal #69420 is accepted.
+func hash(t types.Type) uint32 {
+	return theHasher.Hash(t)
+}
+
+// A Hasher provides a [Hasher.Hash] method to map a type to its hash value.
+// Hashers are stateless, and all are equivalent.
+type Hasher struct{}
+
+var theHasher Hasher
+
+// MakeHasher returns Hasher{}.
+// Hashers are stateless; all are equivalent.
+func MakeHasher() Hasher { return theHasher }

 // Hash computes a hash value for the given type t such that
 // Identical(t, t') => Hash(t) == Hash(t').
 func (h Hasher) Hash(t types.Type) uint32 {
-	hash, ok := h.memo[t]
-	if !ok {
-		hash = h.hashFor(t)
-		h.memo[t] = hash
-	}
-	return hash
+	return hasher{inGenericSig: false}.hash(t)
 }

+// hasher holds the state of a single Hash traversal: whether we are
+// inside the signature of a generic function; this is used to
+// optimize [hasher.hashTypeParam].
+type hasher struct{ inGenericSig bool }
+
 // hashString computes the FowlerNollVo hash of s.
 func hashString(s string) uint32 {
 	var h uint32

@@ -252,21 +218,21 @@ func hashString(s string) uint32 {
 	return h
 }

-// hashFor computes the hash of t.
-func (h Hasher) hashFor(t types.Type) uint32 {
+// hash computes the hash of t.
+func (h hasher) hash(t types.Type) uint32 {
 	// See Identical for rationale.
 	switch t := t.(type) {
 	case *types.Basic:
 		return uint32(t.Kind())

 	case *types.Alias:
-		return h.Hash(types.Unalias(t))
+		return h.hash(types.Unalias(t))

 	case *types.Array:
-		return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+		return 9043 + 2*uint32(t.Len()) + 3*h.hash(t.Elem())

 	case *types.Slice:
-		return 9049 + 2*h.Hash(t.Elem())
+		return 9049 + 2*h.hash(t.Elem())

 	case *types.Struct:
 		var hash uint32 = 9059

@@ -277,12 +243,12 @@ func (h Hasher) hashFor(t types.Type) uint32 {
 			}
 			hash += hashString(t.Tag(i))
 			hash += hashString(f.Name()) // (ignore f.Pkg)
-			hash += h.Hash(f.Type())
+			hash += h.hash(f.Type())
 		}
 		return hash

 	case *types.Pointer:
-		return 9067 + 2*h.Hash(t.Elem())
+		return 9067 + 2*h.hash(t.Elem())

 	case *types.Signature:
 		var hash uint32 = 9091

@@ -290,33 +256,11 @@ func (h Hasher) hashFor(t types.Type) uint32 {
 			hash *= 8863
 		}

-		// Use a separate hasher for types inside of the signature, where type
-		// parameter identity is modified to be (index, constraint). We must use a
-		// new memo for this hasher as type identity may be affected by this
-		// masking. For example, in func[T any](*T), the identity of *T depends on
-		// whether we are mapping the argument in isolation, or recursively as part
-		// of hashing the signature.
-		//
-		// We should never encounter a generic signature while hashing another
-		// generic signature, but defensively set sigTParams only if h.mask is
-		// unset.
 		tparams := t.TypeParams()
-		if h.sigTParams == nil && tparams.Len() != 0 {
-			h = Hasher{
-				// There may be something more efficient than discarding the existing
-				// memo, but it would require detecting whether types are 'tainted' by
-				// references to type parameters.
-				memo: make(map[types.Type]uint32),
-				// Re-using ptrMap ensures that pointer identity is preserved in this
-				// hasher.
-				ptrMap:     h.ptrMap,
-				sigTParams: tparams,
-			}
-		}
-
-		for i := 0; i < tparams.Len(); i++ {
+		for i := range tparams.Len() {
+			h.inGenericSig = true
 			tparam := tparams.At(i)
-			hash += 7 * h.Hash(tparam.Constraint())
+			hash += 7 * h.hash(tparam.Constraint())
 		}

 		return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())

@@ -350,17 +294,17 @@ func (h Hasher) hashFor(t types.Type) uint32 {
 		return hash

 	case *types.Map:
-		return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+		return 9109 + 2*h.hash(t.Key()) + 3*h.hash(t.Elem())

 	case *types.Chan:
-		return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+		return 9127 + 2*uint32(t.Dir()) + 3*h.hash(t.Elem())

 	case *types.Named:
-		hash := h.hashPtr(t.Obj())
+		hash := h.hashTypeName(t.Obj())
 		targs := t.TypeArgs()
 		for i := 0; i < targs.Len(); i++ {
 			targ := targs.At(i)
-			hash += 2 * h.Hash(targ)
+			hash += 2 * h.hash(targ)
 		}
 		return hash

@@ -374,17 +318,17 @@ func (h Hasher) hashFor(t types.Type) uint32 {
 	panic(fmt.Sprintf("%T: %v", t, t))
 }

-func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+func (h hasher) hashTuple(tuple *types.Tuple) uint32 {
 	// See go/types.identicalTypes for rationale.
 	n := tuple.Len()
 	hash := 9137 + 2*uint32(n)
-	for i := 0; i < n; i++ {
-		hash += 3 * h.Hash(tuple.At(i).Type())
+	for i := range n {
+		hash += 3 * h.hash(tuple.At(i).Type())
 	}
 	return hash
 }

-func (h Hasher) hashUnion(t *types.Union) uint32 {
+func (h hasher) hashUnion(t *types.Union) uint32 {
 	// Hash type restrictions.
 	terms, err := typeparams.UnionTermSet(t)
 	// if err != nil t has invalid type restrictions. Fall back on a non-zero

@@ -395,11 +339,11 @@ func (h Hasher) hashUnion(t *types.Union) uint32 {
 	return h.hashTermSet(terms)
 }

-func (h Hasher) hashTermSet(terms []*types.Term) uint32 {
+func (h hasher) hashTermSet(terms []*types.Term) uint32 {
 	hash := 9157 + 2*uint32(len(terms))
 	for _, term := range terms {
 		// term order is not significant.
-		termHash := h.Hash(term.Type())
+		termHash := h.hash(term.Type())
 		if term.Tilde() {
 			termHash *= 9161
 		}

@@ -408,36 +352,42 @@ func (h Hasher) hashTermSet(terms []*types.Term) uint32 {
 	return hash
 }

-// hashTypeParam returns a hash of the type parameter t, with a hash value
-// depending on whether t is contained in h.sigTParams.
-//
-// If h.sigTParams is set and contains t, then we are in the process of hashing
-// a signature, and the hash value of t must depend only on t's index and
-// constraint: signatures are considered identical modulo type parameter
-// renaming. To avoid infinite recursion, we only hash the type parameter
-// index, and rely on types.Identical to handle signatures where constraints
-// are not identical.
-//
-// Otherwise the hash of t depends only on t's pointer identity.
-func (h Hasher) hashTypeParam(t *types.TypeParam) uint32 {
-	if h.sigTParams != nil {
-		i := t.Index()
-		if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) {
-			return 9173 + 3*uint32(i)
-		}
+// hashTypeParam returns the hash of a type parameter.
+func (h hasher) hashTypeParam(t *types.TypeParam) uint32 {
+	// Within the signature of a generic function, TypeParams are
+	// identical if they have the same index and constraint, so we
+	// hash them based on index.
+	//
+	// When we are outside a generic function, free TypeParams are
+	// identical iff they are the same object, so we can use a
+	// more discriminating hash consistent with object identity.
+	// This optimization saves [Map] about 4% when hashing all the
+	// types.Info.Types in the forward closure of net/http.
+	if !h.inGenericSig {
+		// Optimization: outside a generic function signature,
+		// use a more discrimating hash consistent with object identity.
+		return h.hashTypeName(t.Obj())
 	}
-	return h.hashPtr(t.Obj())
+	return 9173 + 3*uint32(t.Index())
 }

-// hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that
-// pointers values are not dependent on the GC.
-func (h Hasher) hashPtr(ptr any) uint32 {
-	if hash, ok := h.ptrMap[ptr]; ok {
-		return hash
-	}
-	hash := uint32(reflect.ValueOf(ptr).Pointer())
-	h.ptrMap[ptr] = hash
-	return hash
-}
+var theSeed = maphash.MakeSeed()
+
+// hashTypeName hashes the pointer of tname.
+func (hasher) hashTypeName(tname *types.TypeName) uint32 {
+	// Since types.Identical uses == to compare TypeNames,
+	// the Hash function uses maphash.Comparable.
+	// TODO(adonovan): or will, when it becomes available in go1.24.
+	// In the meantime we use the pointer's numeric value.
+	//
+	//   hash := maphash.Comparable(theSeed, tname)
+	//
+	// (Another approach would be to hash the name and package
+	// path, and whether or not it is a package-level typename. It
+	// is rare for a package to define multiple local types with
+	// the same name.)
+	hash := uintptr(unsafe.Pointer(tname))
+	return uint32(hash ^ (hash >> 32))
+}

 // shallowHash computes a hash of t without looking at any of its
@@ -454,7 +404,7 @@ func (h Hasher) hashPtr(ptr any) uint32 {
 // include m itself; there is no mention of the named type X that
 // might help us break the cycle.
 // (See comment in go/types.identical, case *Interface, for more.)
-func (h Hasher) shallowHash(t types.Type) uint32 {
+func (h hasher) shallowHash(t types.Type) uint32 {
 	// t is the type of an interface method (Signature),
 	// its params or results (Tuples), or their immediate
 	// elements (mostly Slice, Pointer, Basic, Named),

@@ -475,7 +425,7 @@ func (h Hasher) shallowHash(t types.Type) uint32 {
 	case *types.Tuple:
 		n := t.Len()
 		hash := 9137 + 2*uint32(n)
-		for i := 0; i < n; i++ {
+		for i := range n {
 			hash += 53471161 * h.shallowHash(t.At(i).Type())
 		}
 		return hash

@@ -508,10 +458,10 @@ func (h Hasher) shallowHash(t types.Type) uint32 {
 		return 9127

 	case *types.Named:
-		return h.hashPtr(t.Obj())
+		return h.hashTypeName(t.Obj())

 	case *types.TypeParam:
-		return h.hashPtr(t.Obj())
+		return h.hashTypeParam(t)
 	}
 	panic(fmt.Sprintf("shallowHash: %T: %v", t, t))
 }
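
For orientation, a small usage sketch of the typeutil.Map API that this rewrite preserves; note that lookup is by type identity (types.Identical), not pointer equality:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	// The zero Map is ready to use; Set lazily allocates the table.
	var m typeutil.Map
	m.Set(types.Typ[types.Int], "an int")
	m.Set(types.NewSlice(types.Typ[types.Int]), "a slice of int")

	// Distinct *types.Slice values are still "identical" types,
	// so lookup succeeds even with a freshly constructed key.
	fmt.Println(m.At(types.NewSlice(types.Typ[types.Int]))) // a slice of int
	fmt.Println(m.Len())                                    // 2
}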


@ -2,52 +2,183 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. // This file should be kept in sync with $GOROOT/src/internal/exportdata/exportdata.go.
// This file also additionally implements FindExportData for gcexportdata.NewReader.
// This file implements FindExportData.
package gcimporter package gcimporter
import ( import (
"bufio" "bufio"
"bytes"
"errors"
"fmt" "fmt"
"go/build"
"io" "io"
"strconv" "os"
"os/exec"
"path/filepath"
"strings" "strings"
"sync"
) )
func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
if err != nil {
return
}
// leave for debugging
if false {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
length, err := strconv.Atoi(s)
size = int64(length)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
}
name = strings.TrimSpace(string(hdr[:16]))
return
}
// FindExportData positions the reader r at the beginning of the // FindExportData positions the reader r at the beginning of the
// export data section of an underlying cmd/compile created archive // export data section of an underlying cmd/compile created archive
// file by reading from it. The reader must be positioned at the // file by reading from it. The reader must be positioned at the
// start of the file before calling this function. // start of the file before calling this function.
// The size result is the length of the export data in bytes. // This returns the length of the export data in bytes.
// //
// This function is needed by [gcexportdata.Read], which must // This function is needed by [gcexportdata.Read], which must
// accept inputs produced by the last two releases of cmd/compile, // accept inputs produced by the last two releases of cmd/compile,
// plus tip. // plus tip.
func FindExportData(r *bufio.Reader) (size int64, err error) { func FindExportData(r *bufio.Reader) (size int64, err error) {
arsize, err := FindPackageDefinition(r)
if err != nil {
return
}
size = int64(arsize)
objapi, headers, err := ReadObjectHeaders(r)
if err != nil {
return
}
size -= int64(len(objapi))
for _, h := range headers {
size -= int64(len(h))
}
// Check for the binary export data section header "$$B\n".
// TODO(taking): Unify with ReadExportDataHeader so that it stops at the 'u' instead of reading
line, err := r.ReadSlice('\n')
if err != nil {
return
}
hdr := string(line)
if hdr != "$$B\n" {
err = fmt.Errorf("unknown export data header: %q", hdr)
return
}
size -= int64(len(hdr))
// For files with a binary export data header "$$B\n",
// these are always terminated by an end-of-section marker "\n$$\n".
// So the last bytes must always be this constant.
//
// The end-of-section marker is not a part of the export data itself.
// Do not include these in size.
//
// It would be nice to have sanity check that the final bytes after
// the export data are indeed the end-of-section marker. The split
// of gcexportdata.NewReader and gcexportdata.Read make checking this
// ugly so gcimporter gives up enforcing this. The compiler and go/types
// importer do enforce this, which seems good enough.
const endofsection = "\n$$\n"
size -= int64(len(endofsection))
if size < 0 {
err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", arsize, size)
return
}
return
}
// ReadUnified reads the contents of the unified export data from a reader r
// that contains the contents of a GC-created archive file.
//
// On success, the reader will be positioned after the end-of-section marker "\n$$\n".
//
// Supported GC-created archive files have 4 layers of nesting:
// - An archive file containing a package definition file.
// - The package definition file contains headers followed by a data section.
// Headers are lines (≤ 4kb) that do not start with "$$".
// - The data section starts with "$$B\n" followed by export data followed
// by an end of section marker "\n$$\n". (The section start "$$\n" is no
// longer supported.)
// - The export data starts with a format byte ('u') followed by the <data> in
// the given format. (See ReadExportDataHeader for older formats.)
//
// Putting this together, the bytes in a GC-created archive files are expected
// to look like the following.
// See cmd/internal/archive for more details on ar file headers.
//
// | <!arch>\n | ar file signature
// | __.PKGDEF...size...\n | ar header for __.PKGDEF including size.
// | go object <...>\n | objabi header
// | <optional headers>\n | other headers such as build id
// | $$B\n | binary format marker
// | u<data>\n | unified export <data>
// | $$\n | end-of-section marker
// | [optional padding] | padding byte (0x0A) if size is odd
// | [ar file header] | other ar files
// | [ar file data] |
func ReadUnified(r *bufio.Reader) (data []byte, err error) {
// We historically guaranteed headers at the default buffer size (4096) work.
// This ensures we can use ReadSlice throughout.
const minBufferSize = 4096
r = bufio.NewReaderSize(r, minBufferSize)
size, err := FindPackageDefinition(r)
if err != nil {
return
}
n := size
objapi, headers, err := ReadObjectHeaders(r)
if err != nil {
return
}
n -= len(objapi)
for _, h := range headers {
n -= len(h)
}
hdrlen, err := ReadExportDataHeader(r)
if err != nil {
return
}
n -= hdrlen
// size also includes the end of section marker. Remove that many bytes from the end.
const marker = "\n$$\n"
n -= len(marker)
if n < 0 {
err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", size, n)
return
}
// Read n bytes from buf.
data = make([]byte, n)
_, err = io.ReadFull(r, data)
if err != nil {
return
}
// Check for marker at the end.
var suffix [len(marker)]byte
_, err = io.ReadFull(r, suffix[:])
if err != nil {
return
}
if s := string(suffix[:]); s != marker {
err = fmt.Errorf("read %q instead of end-of-section marker (%q)", s, marker)
return
}
return
}
// FindPackageDefinition positions the reader r at the beginning of a package
// definition file ("__.PKGDEF") within a GC-created archive by reading
// from it, and returns the size of the package definition file in the archive.
//
// The reader must be positioned at the start of the archive file before calling
// this function, and "__.PKGDEF" is assumed to be the first file in the archive.
//
// See cmd/internal/archive for details on the archive format.
func FindPackageDefinition(r *bufio.Reader) (size int, err error) {
// Uses ReadSlice to limit risk of malformed inputs.
// Read first line to make sure this is an object file. // Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n') line, err := r.ReadSlice('\n')
if err != nil { if err != nil {
@ -61,56 +192,230 @@ func FindExportData(r *bufio.Reader) (size int64, err error) {
return return
} }
// Archive file. Scan to __.PKGDEF. // package export block should be first
var name string size = readArchiveHeader(r, "__.PKGDEF")
if name, size, err = readGopackHeader(r); err != nil { if size <= 0 {
return err = fmt.Errorf("not a package file")
}
arsize := size
// First entry should be __.PKGDEF.
if name != "__.PKGDEF" {
err = fmt.Errorf("go archive is missing __.PKGDEF")
return
}
// Read first line of __.PKGDEF data, so that line
// is once again the first line of the input.
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
// Now at __.PKGDEF in archive or still at beginning of file.
// Either way, line should begin with "go object ".
if !strings.HasPrefix(string(line), "go object ") {
err = fmt.Errorf("not a Go object file")
return
}
// Skip over object headers to get to the export data section header "$$B\n".
// Object headers are lines that do not start with '$'.
for line[0] != '$' {
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
}
// Check for the binary export data section header "$$B\n".
hdr := string(line)
if hdr != "$$B\n" {
err = fmt.Errorf("unknown export data header: %q", hdr)
return
}
// TODO(taking): Remove end-of-section marker "\n$$\n" from size.
if size < 0 {
err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", arsize, size)
return return
} }
return return
} }
// ReadObjectHeaders reads object headers from the reader. Object headers are
// lines that do not start with an end-of-section marker "$$". The first header
// is the objabi header. On success, the reader will be positioned at the beginning
// of the end-of-section marker.
//
// It returns an error if any header does not fit in r.Size() bytes.
func ReadObjectHeaders(r *bufio.Reader) (objapi string, headers []string, err error) {
// line is a temporary buffer for headers.
// Use bounded reads (ReadSlice, Peek) to limit risk of malformed inputs.
var line []byte
// objapi header should be the first line
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
objapi = string(line)
// objapi header begins with "go object ".
if !strings.HasPrefix(objapi, "go object ") {
err = fmt.Errorf("not a go object file: %s", objapi)
return
}
// process remaining object header lines
for {
// check for an end of section marker "$$"
line, err = r.Peek(2)
if err != nil {
return
}
if string(line) == "$$" {
return // stop
}
// read next header
line, err = r.ReadSlice('\n')
if err != nil {
return
}
headers = append(headers, string(line))
}
}
// ReadExportDataHeader reads the export data header and format from r.
// It returns the number of bytes read, or an error if the format is no longer
// supported or it failed to read.
//
// The only currently supported format is binary export data in the
// unified export format.
func ReadExportDataHeader(r *bufio.Reader) (n int, err error) {
// Read export data header.
line, err := r.ReadSlice('\n')
if err != nil {
return
}
hdr := string(line)
switch hdr {
case "$$\n":
err = fmt.Errorf("old textual export format no longer supported (recompile package)")
return
case "$$B\n":
var format byte
format, err = r.ReadByte()
if err != nil {
return
}
// The unified export format starts with a 'u'.
switch format {
case 'u':
default:
// Older no longer supported export formats include:
// indexed export format which started with an 'i'; and
// the older binary export format which started with a 'c',
// 'd', or 'v' (from "version").
err = fmt.Errorf("binary export format %q is no longer supported (recompile package)", format)
return
}
default:
err = fmt.Errorf("unknown export data header: %q", hdr)
return
}
n = len(hdr) + 1 // + 1 is for 'u'
return
}
// FindPkg returns the filename and unique package id for an import
// path based on package information provided by build.Import (using
// the build.Default build.Context). A relative srcDir is interpreted
// relative to the current working directory.
//
// FindPkg is only used in tests within x/tools.
func FindPkg(path, srcDir string) (filename, id string, err error) {
// TODO(taking): Move internal/exportdata.FindPkg into its own file,
// and then this copy into a _test package.
if path == "" {
return "", "", errors.New("path is empty")
}
var noext string
switch {
default:
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
// Don't require the source files to be present.
if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
srcDir = abs
}
var bp *build.Package
bp, err = build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
if bp.PkgObj == "" {
if bp.Goroot && bp.Dir != "" {
filename, err = lookupGorootExport(bp.Dir)
if err == nil {
_, err = os.Stat(filename)
}
if err == nil {
return filename, bp.ImportPath, nil
}
}
goto notfound
} else {
noext = strings.TrimSuffix(bp.PkgObj, ".a")
}
id = bp.ImportPath
case build.IsLocalImport(path):
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
noext = filepath.Join(srcDir, path)
id = noext
case filepath.IsAbs(path):
// for completeness only - go/build.Import
// does not support absolute imports
// "/x" -> "/x.ext", "/x"
noext = path
id = path
}
if false { // for debugging
if path != id {
fmt.Printf("%s -> %s\n", path, id)
}
}
// try extensions
for _, ext := range pkgExts {
filename = noext + ext
f, statErr := os.Stat(filename)
if statErr == nil && !f.IsDir() {
return filename, id, nil
}
if err == nil {
err = statErr
}
}
notfound:
if err == nil {
return "", path, fmt.Errorf("can't find import: %q", path)
}
return "", path, fmt.Errorf("can't find import: %q: %w", path, err)
}
var pkgExts = [...]string{".a", ".o"} // a file from the build cache will have no extension
var exportMap sync.Map // package dir → func() (string, error)
// lookupGorootExport returns the location of the export data
// (normally found in the build cache, but located in GOROOT/pkg
// in prior Go releases) for the package located in pkgDir.
//
// (We use the package's directory instead of its import path
// mainly to simplify handling of the packages in src/vendor
// and cmd/vendor.)
//
// lookupGorootExport is only used in tests within x/tools.
func lookupGorootExport(pkgDir string) (string, error) {
f, ok := exportMap.Load(pkgDir)
if !ok {
var (
listOnce sync.Once
exportPath string
err error
)
f, _ = exportMap.LoadOrStore(pkgDir, func() (string, error) {
listOnce.Do(func() {
cmd := exec.Command(filepath.Join(build.Default.GOROOT, "bin", "go"), "list", "-export", "-f", "{{.Export}}", pkgDir)
cmd.Dir = build.Default.GOROOT
cmd.Env = append(os.Environ(), "PWD="+cmd.Dir, "GOROOT="+build.Default.GOROOT)
var output []byte
output, err = cmd.Output()
if err != nil {
if ee, ok := err.(*exec.ExitError); ok && len(ee.Stderr) > 0 {
err = errors.New(string(ee.Stderr))
}
return
}
exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
if len(exports) != 1 {
err = fmt.Errorf("go list reported %d exports; expected 1", len(exports))
return
}
exportPath = exports[0]
})
return exportPath, err
})
}
return f.(func() (string, error))()
}
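
As a usage sketch (illustrative, not part of the upstream diff): FindPkg now reports an error instead of just an empty filename. A minimal call site, assuming a test inside x/tools where the internal gcimporter package is importable; the import path and srcDir are illustrative.

package gcimporter_test

import (
	"testing"

	"golang.org/x/tools/internal/gcimporter"
)

// Sketch: locate export data for "fmt". For GOROOT packages with no
// .a file on disk, FindPkg falls back to lookupGorootExport, which
// shells out to `go list -export` and caches the answer per directory.
func TestFindPkgSketch(t *testing.T) {
	filename, id, err := gcimporter.FindPkg("fmt", ".")
	if err != nil {
		t.Skipf("export data unavailable in this environment: %v", err)
	}
	t.Logf("%s -> %s", id, filename)
}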

View File

@@ -23,17 +23,11 @@ package gcimporter // import "golang.org/x/tools/internal/gcimporter"
import (
"bufio"
"bytes"
"fmt"
"go/build"
"go/token"
"go/types"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
)
const (
@@ -45,127 +39,14 @@
trace = false
)
var exportMap sync.Map // package dir → func() (string, bool)
// lookupGorootExport returns the location of the export data
// (normally found in the build cache, but located in GOROOT/pkg
// in prior Go releases) for the package located in pkgDir.
//
// (We use the package's directory instead of its import path
// mainly to simplify handling of the packages in src/vendor
// and cmd/vendor.)
func lookupGorootExport(pkgDir string) (string, bool) {
f, ok := exportMap.Load(pkgDir)
if !ok {
var (
listOnce sync.Once
exportPath string
)
f, _ = exportMap.LoadOrStore(pkgDir, func() (string, bool) {
listOnce.Do(func() {
cmd := exec.Command("go", "list", "-export", "-f", "{{.Export}}", pkgDir)
cmd.Dir = build.Default.GOROOT
var output []byte
output, err := cmd.Output()
if err != nil {
return
}
exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
if len(exports) != 1 {
return
}
exportPath = exports[0]
})
return exportPath, exportPath != ""
})
}
return f.(func() (string, bool))()
}
var pkgExts = [...]string{".a", ".o"}
// FindPkg returns the filename and unique package id for an import
// path based on package information provided by build.Import (using
// the build.Default build.Context). A relative srcDir is interpreted
// relative to the current working directory.
// If no file was found, an empty filename is returned.
func FindPkg(path, srcDir string) (filename, id string) {
if path == "" {
return
}
var noext string
switch {
default:
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
// Don't require the source files to be present.
if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
srcDir = abs
}
bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
if bp.PkgObj == "" {
var ok bool
if bp.Goroot && bp.Dir != "" {
filename, ok = lookupGorootExport(bp.Dir)
}
if !ok {
id = path // make sure we have an id to print in error message
return
}
} else {
noext = strings.TrimSuffix(bp.PkgObj, ".a")
id = bp.ImportPath
}
case build.IsLocalImport(path):
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
noext = filepath.Join(srcDir, path)
id = noext
case filepath.IsAbs(path):
// for completeness only - go/build.Import
// does not support absolute imports
// "/x" -> "/x.ext", "/x"
noext = path
id = path
}
if false { // for debugging
if path != id {
fmt.Printf("%s -> %s\n", path, id)
}
}
if filename != "" {
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
return
}
}
// try extensions
for _, ext := range pkgExts {
filename = noext + ext
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
return
}
}
filename = "" // not found
return
}
// Import imports a gc-generated package given its import path and srcDir, adds
// the corresponding package object to the packages map, and returns the object.
// The packages map must contain all packages already imported.
//
// TODO(taking): Import is only used in tests. Move to gcimporter_test.
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
// Import is only used in tests.
func Import(fset *token.FileSet, packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
var rc io.ReadCloser
var filename, id string
var id string
if lookup != nil {
// With custom lookup specified, assume that caller has
// converted path to a canonical import path for use in the map.
@@ -184,12 +65,13 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
}
rc = f
} else {
filename, id = FindPkg(path, srcDir)
var filename string
filename, id, err = FindPkg(path, srcDir)
if filename == "" {
if path == "unsafe" {
return types.Unsafe, nil
}
return nil, fmt.Errorf("can't find import: %q", id)
return nil, err
}
// no need to re-import if the package was imported completely before
@@ -212,54 +94,15 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
}
defer rc.Close()
var size int64
buf := bufio.NewReader(rc)
if size, err = FindExportData(buf); err != nil {
data, err := ReadUnified(buf)
return
}
var data []byte
data, err = io.ReadAll(buf)
if err != nil {
err = fmt.Errorf("import %q: %v", path, err)
return
}
if len(data) == 0 {
return nil, fmt.Errorf("no data to load a package from for path %s", id)
}
// TODO(gri): allow clients of go/importer to provide a FileSet.
// Or, define a new standard go/types/gcexportdata package.
fset := token.NewFileSet()
// Select appropriate importer.
// unified: emitted by cmd/compile since go1.20.
_, pkg, err = UImportData(fset, packages, data, id)
return
switch data[0] {
case 'v', 'c', 'd':
// binary: emitted by cmd/compile till go1.10; obsolete.
return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
case 'i':
// indexed: emitted by cmd/compile till go1.19;
// now used only for serializing go/types.
// See https://github.com/golang/go/issues/69491.
_, pkg, err := IImportData(fset, packages, data[1:], id)
return pkg, err
case 'u':
// unified: emitted by cmd/compile since go1.20.
_, pkg, err := UImportData(fset, packages, data[1:size], id)
return pkg, err
default:
l := len(data)
if l > 10 {
l = 10
}
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
}
}
type byPath []*types.Package
func (a byPath) Len() int { return len(a) }
func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
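
A sketch of the new call shape (illustrative, not from the diff): Import now takes the caller's *token.FileSet and accepts only unified ('u') export data, delegating to ReadUnified and UImportData. This assumes test code inside x/tools, since the package is internal.

package gcimporter_test

import (
	"go/token"
	"go/types"
	"testing"

	"golang.org/x/tools/internal/gcimporter"
)

func TestImportSketch(t *testing.T) {
	fset := token.NewFileSet() // previously allocated inside Import itself
	imports := make(map[string]*types.Package)
	pkg, err := gcimporter.Import(fset, imports, "strings", ".", nil)
	if err != nil {
		t.Skipf("import failed in this environment: %v", err)
	}
	if got := pkg.Path(); got != "strings" {
		t.Errorf("imported %q, want %q", got, "strings")
	}
}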

View File

@@ -5,8 +5,6 @@
// Indexed package import.
// See iexport.go for the export data format.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
package gcimporter
import (
@@ -1111,3 +1109,9 @@ func (r *importReader) byte() byte {
}
return x
}
type byPath []*types.Package
func (a byPath) Len() int { return len(a) }
func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }

View File

@@ -0,0 +1,30 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gcimporter
import (
"bufio"
"io"
"strconv"
"strings"
)
// Copy of $GOROOT/src/cmd/internal/archive.ReadHeader.
func readArchiveHeader(b *bufio.Reader, name string) int {
// architecture-independent object file output
const HeaderSize = 60
var buf [HeaderSize]byte
if _, err := io.ReadFull(b, buf[:]); err != nil {
return -1
}
aname := strings.Trim(string(buf[0:16]), " ")
if !strings.HasPrefix(aname, name) {
return -1
}
asize := strings.Trim(string(buf[48:58]), " ")
i, _ := strconv.Atoi(asize)
return i
}
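
A hedged sketch of how readArchiveHeader might be driven (not part of the diff): it expects the 60-byte ar entry header and returns the entry size, or -1 on a name mismatch. Since the function is unexported, this would live in the same package; the helper name, the file path, and the extra "os"/"fmt" imports are assumptions.

// Hypothetical same-package helper: read the size of the first archive
// entry. Go .a files start with the "!<arch>\n" magic, followed by the
// __.PKGDEF entry header.
func firstEntrySize(path string) (int, error) {
	f, err := os.Open(path) // assumes "os" and "fmt" are also imported
	if err != nil {
		return 0, err
	}
	defer f.Close()
	br := bufio.NewReader(f)
	if _, err := br.Discard(8); err != nil { // skip the "!<arch>\n" magic
		return 0, err
	}
	if size := readArchiveHeader(br, "__.PKGDEF"); size >= 0 {
		return size, nil
	}
	return 0, fmt.Errorf("%s: __.PKGDEF entry not found", path)
}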

View File

@@ -11,7 +11,6 @@ import (
"go/token"
"go/types"
"sort"
"strings"
"golang.org/x/tools/internal/aliases"
"golang.org/x/tools/internal/pkgbits"
@@ -71,7 +70,6 @@ func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []
}
s := string(data)
s = s[:strings.LastIndex(s, "\n$$\n")]
input := pkgbits.NewPkgDecoder(path, s)
pkg = readUnifiedPackage(fset, nil, imports, input)
return
@@ -266,7 +264,12 @@ func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
func (r *reader) doPkg() *types.Package {
path := r.String()
switch path {
case "":
// cmd/compile emits path="main" for main packages because
// that's the linker symbol prefix it used; but we need
// the package's path as it would be reported by go list,
// hence "main" below.
// See test at go/packages.TestMainPackagePathInModeTypes.
case "", "main":
path = r.p.PkgPath()
case "builtin":
return nil // universe

View File

@@ -268,6 +268,8 @@ var PackageSymbols = map[string][]Symbol{
{"ErrTooLarge", Var, 0},
{"Fields", Func, 0},
{"FieldsFunc", Func, 0},
{"FieldsFuncSeq", Func, 24},
{"FieldsSeq", Func, 24},
{"HasPrefix", Func, 0},
{"HasSuffix", Func, 0},
{"Index", Func, 0},
@@ -280,6 +282,7 @@ var PackageSymbols = map[string][]Symbol{
{"LastIndexAny", Func, 0},
{"LastIndexByte", Func, 5},
{"LastIndexFunc", Func, 0},
{"Lines", Func, 24},
{"Map", Func, 0},
{"MinRead", Const, 0},
{"NewBuffer", Func, 0},
@@ -293,7 +296,9 @@ var PackageSymbols = map[string][]Symbol{
{"Split", Func, 0},
{"SplitAfter", Func, 0},
{"SplitAfterN", Func, 0},
{"SplitAfterSeq", Func, 24},
{"SplitN", Func, 0},
{"SplitSeq", Func, 24},
{"Title", Func, 0},
{"ToLower", Func, 0},
{"ToLowerSpecial", Func, 0},
@@ -535,6 +540,7 @@ var PackageSymbols = map[string][]Symbol{
{"NewCTR", Func, 0},
{"NewGCM", Func, 2},
{"NewGCMWithNonceSize", Func, 5},
{"NewGCMWithRandomNonce", Func, 24},
{"NewGCMWithTagSize", Func, 11},
{"NewOFB", Func, 0},
{"Stream", Type, 0},
@@ -673,6 +679,14 @@ var PackageSymbols = map[string][]Symbol{
{"Unmarshal", Func, 0},
{"UnmarshalCompressed", Func, 15},
},
"crypto/fips140": {
{"Enabled", Func, 24},
},
"crypto/hkdf": {
{"Expand", Func, 24},
{"Extract", Func, 24},
{"Key", Func, 24},
},
"crypto/hmac": {
{"Equal", Func, 1},
{"New", Func, 0},
@@ -683,11 +697,43 @@ var PackageSymbols = map[string][]Symbol{
{"Size", Const, 0},
{"Sum", Func, 2},
},
"crypto/mlkem": {
{"(*DecapsulationKey1024).Bytes", Method, 24},
{"(*DecapsulationKey1024).Decapsulate", Method, 24},
{"(*DecapsulationKey1024).EncapsulationKey", Method, 24},
{"(*DecapsulationKey768).Bytes", Method, 24},
{"(*DecapsulationKey768).Decapsulate", Method, 24},
{"(*DecapsulationKey768).EncapsulationKey", Method, 24},
{"(*EncapsulationKey1024).Bytes", Method, 24},
{"(*EncapsulationKey1024).Encapsulate", Method, 24},
{"(*EncapsulationKey768).Bytes", Method, 24},
{"(*EncapsulationKey768).Encapsulate", Method, 24},
{"CiphertextSize1024", Const, 24},
{"CiphertextSize768", Const, 24},
{"DecapsulationKey1024", Type, 24},
{"DecapsulationKey768", Type, 24},
{"EncapsulationKey1024", Type, 24},
{"EncapsulationKey768", Type, 24},
{"EncapsulationKeySize1024", Const, 24},
{"EncapsulationKeySize768", Const, 24},
{"GenerateKey1024", Func, 24},
{"GenerateKey768", Func, 24},
{"NewDecapsulationKey1024", Func, 24},
{"NewDecapsulationKey768", Func, 24},
{"NewEncapsulationKey1024", Func, 24},
{"NewEncapsulationKey768", Func, 24},
{"SeedSize", Const, 24},
{"SharedKeySize", Const, 24},
},
"crypto/pbkdf2": {
{"Key", Func, 24},
},
"crypto/rand": {
{"Int", Func, 0},
{"Prime", Func, 0},
{"Read", Func, 0},
{"Reader", Var, 0},
{"Text", Func, 24},
},
"crypto/rc4": {
{"(*Cipher).Reset", Method, 0},
@@ -766,6 +812,39 @@ var PackageSymbols = map[string][]Symbol{
{"Sum224", Func, 2},
{"Sum256", Func, 2},
},
"crypto/sha3": {
{"(*SHA3).AppendBinary", Method, 24},
{"(*SHA3).BlockSize", Method, 24},
{"(*SHA3).MarshalBinary", Method, 24},
{"(*SHA3).Reset", Method, 24},
{"(*SHA3).Size", Method, 24},
{"(*SHA3).Sum", Method, 24},
{"(*SHA3).UnmarshalBinary", Method, 24},
{"(*SHA3).Write", Method, 24},
{"(*SHAKE).AppendBinary", Method, 24},
{"(*SHAKE).BlockSize", Method, 24},
{"(*SHAKE).MarshalBinary", Method, 24},
{"(*SHAKE).Read", Method, 24},
{"(*SHAKE).Reset", Method, 24},
{"(*SHAKE).UnmarshalBinary", Method, 24},
{"(*SHAKE).Write", Method, 24},
{"New224", Func, 24},
{"New256", Func, 24},
{"New384", Func, 24},
{"New512", Func, 24},
{"NewCSHAKE128", Func, 24},
{"NewCSHAKE256", Func, 24},
{"NewSHAKE128", Func, 24},
{"NewSHAKE256", Func, 24},
{"SHA3", Type, 24},
{"SHAKE", Type, 24},
{"Sum224", Func, 24},
{"Sum256", Func, 24},
{"Sum384", Func, 24},
{"Sum512", Func, 24},
{"SumSHAKE128", Func, 24},
{"SumSHAKE256", Func, 24},
},
"crypto/sha512": {
{"BlockSize", Const, 0},
{"New", Func, 0},
@@ -788,6 +867,7 @@ var PackageSymbols = map[string][]Symbol{
{"ConstantTimeEq", Func, 0},
{"ConstantTimeLessOrEq", Func, 2},
{"ConstantTimeSelect", Func, 0},
{"WithDataIndependentTiming", Func, 24},
{"XORBytes", Func, 20},
},
"crypto/tls": {
@@ -864,6 +944,7 @@ var PackageSymbols = map[string][]Symbol{
{"ClientHelloInfo", Type, 4},
{"ClientHelloInfo.CipherSuites", Field, 4},
{"ClientHelloInfo.Conn", Field, 8},
{"ClientHelloInfo.Extensions", Field, 24},
{"ClientHelloInfo.ServerName", Field, 4},
{"ClientHelloInfo.SignatureSchemes", Field, 8},
{"ClientHelloInfo.SupportedCurves", Field, 4},
@@ -881,6 +962,7 @@ var PackageSymbols = map[string][]Symbol{
{"Config.CurvePreferences", Field, 3},
{"Config.DynamicRecordSizingDisabled", Field, 7},
{"Config.EncryptedClientHelloConfigList", Field, 23},
{"Config.EncryptedClientHelloKeys", Field, 24},
{"Config.EncryptedClientHelloRejectionVerify", Field, 23},
{"Config.GetCertificate", Field, 4},
{"Config.GetClientCertificate", Field, 8},
@@ -934,6 +1016,10 @@ var PackageSymbols = map[string][]Symbol{
{"ECHRejectionError", Type, 23},
{"ECHRejectionError.RetryConfigList", Field, 23},
{"Ed25519", Const, 13},
{"EncryptedClientHelloKey", Type, 24},
{"EncryptedClientHelloKey.Config", Field, 24},
{"EncryptedClientHelloKey.PrivateKey", Field, 24},
{"EncryptedClientHelloKey.SendAsRetry", Field, 24},
{"InsecureCipherSuites", Func, 14},
{"Listen", Func, 0},
{"LoadX509KeyPair", Func, 0},
@@ -1032,6 +1118,7 @@ var PackageSymbols = map[string][]Symbol{
{"VersionTLS12", Const, 2},
{"VersionTLS13", Const, 12},
{"X25519", Const, 8},
{"X25519MLKEM768", Const, 24},
{"X509KeyPair", Func, 0},
},
"crypto/x509": {
@@ -1056,6 +1143,8 @@ var PackageSymbols = map[string][]Symbol{
{"(ConstraintViolationError).Error", Method, 0},
{"(HostnameError).Error", Method, 0},
{"(InsecureAlgorithmError).Error", Method, 6},
{"(OID).AppendBinary", Method, 24},
{"(OID).AppendText", Method, 24},
{"(OID).Equal", Method, 22},
{"(OID).EqualASN1OID", Method, 22},
{"(OID).MarshalBinary", Method, 23},
@@ -1084,6 +1173,10 @@ var PackageSymbols = map[string][]Symbol{
{"Certificate.Extensions", Field, 2},
{"Certificate.ExtraExtensions", Field, 2},
{"Certificate.IPAddresses", Field, 1},
{"Certificate.InhibitAnyPolicy", Field, 24},
{"Certificate.InhibitAnyPolicyZero", Field, 24},
{"Certificate.InhibitPolicyMapping", Field, 24},
{"Certificate.InhibitPolicyMappingZero", Field, 24},
{"Certificate.IsCA", Field, 0},
{"Certificate.Issuer", Field, 0},
{"Certificate.IssuingCertificateURL", Field, 2},
@@ -1100,6 +1193,7 @@ var PackageSymbols = map[string][]Symbol{
{"Certificate.PermittedURIDomains", Field, 10},
{"Certificate.Policies", Field, 22},
{"Certificate.PolicyIdentifiers", Field, 0},
{"Certificate.PolicyMappings", Field, 24},
{"Certificate.PublicKey", Field, 0},
{"Certificate.PublicKeyAlgorithm", Field, 0},
{"Certificate.Raw", Field, 0},
@@ -1107,6 +1201,8 @@ var PackageSymbols = map[string][]Symbol{
{"Certificate.RawSubject", Field, 0},
{"Certificate.RawSubjectPublicKeyInfo", Field, 0},
{"Certificate.RawTBSCertificate", Field, 0},
{"Certificate.RequireExplicitPolicy", Field, 24},
{"Certificate.RequireExplicitPolicyZero", Field, 24},
{"Certificate.SerialNumber", Field, 0},
{"Certificate.Signature", Field, 0},
{"Certificate.SignatureAlgorithm", Field, 0},
@@ -1198,6 +1294,7 @@ var PackageSymbols = map[string][]Symbol{
{"NameConstraintsWithoutSANs", Const, 10},
{"NameMismatch", Const, 8},
{"NewCertPool", Func, 0},
{"NoValidChains", Const, 24},
{"NotAuthorizedToSign", Const, 0},
{"OID", Type, 22},
{"OIDFromInts", Func, 22},
@@ -1219,6 +1316,9 @@ var PackageSymbols = map[string][]Symbol{
{"ParsePKCS8PrivateKey", Func, 0},
{"ParsePKIXPublicKey", Func, 0},
{"ParseRevocationList", Func, 19},
{"PolicyMapping", Type, 24},
{"PolicyMapping.IssuerDomainPolicy", Field, 24},
{"PolicyMapping.SubjectDomainPolicy", Field, 24},
{"PublicKeyAlgorithm", Type, 0},
{"PureEd25519", Const, 13},
{"RSA", Const, 0},
@@ -1265,6 +1365,7 @@ var PackageSymbols = map[string][]Symbol{
{"UnknownPublicKeyAlgorithm", Const, 0},
{"UnknownSignatureAlgorithm", Const, 0},
{"VerifyOptions", Type, 0},
{"VerifyOptions.CertificatePolicies", Field, 24},
{"VerifyOptions.CurrentTime", Field, 0},
{"VerifyOptions.DNSName", Field, 0},
{"VerifyOptions.Intermediates", Field, 0},
@@ -1975,6 +2076,8 @@ var PackageSymbols = map[string][]Symbol{
{"(*File).DynString", Method, 1},
{"(*File).DynValue", Method, 21},
{"(*File).DynamicSymbols", Method, 4},
{"(*File).DynamicVersionNeeds", Method, 24},
{"(*File).DynamicVersions", Method, 24},
{"(*File).ImportedLibraries", Method, 0},
{"(*File).ImportedSymbols", Method, 0},
{"(*File).Section", Method, 0},
@@ -2240,6 +2343,19 @@ var PackageSymbols = map[string][]Symbol{
{"DynFlag", Type, 0},
{"DynFlag1", Type, 21},
{"DynTag", Type, 0},
{"DynamicVersion", Type, 24},
{"DynamicVersion.Deps", Field, 24},
{"DynamicVersion.Flags", Field, 24},
{"DynamicVersion.Index", Field, 24},
{"DynamicVersion.Name", Field, 24},
{"DynamicVersionDep", Type, 24},
{"DynamicVersionDep.Dep", Field, 24},
{"DynamicVersionDep.Flags", Field, 24},
{"DynamicVersionDep.Index", Field, 24},
{"DynamicVersionFlag", Type, 24},
{"DynamicVersionNeed", Type, 24},
{"DynamicVersionNeed.Name", Field, 24},
{"DynamicVersionNeed.Needs", Field, 24},
{"EI_ABIVERSION", Const, 0},
{"EI_CLASS", Const, 0},
{"EI_DATA", Const, 0},
@@ -3726,8 +3842,19 @@ var PackageSymbols = map[string][]Symbol{
{"Symbol.Size", Field, 0},
{"Symbol.Value", Field, 0},
{"Symbol.Version", Field, 13},
{"Symbol.VersionIndex", Field, 24},
{"Symbol.VersionScope", Field, 24},
{"SymbolVersionScope", Type, 24},
{"Type", Type, 0},
{"VER_FLG_BASE", Const, 24},
{"VER_FLG_INFO", Const, 24},
{"VER_FLG_WEAK", Const, 24},
{"Version", Type, 0},
{"VersionScopeGlobal", Const, 24},
{"VersionScopeHidden", Const, 24},
{"VersionScopeLocal", Const, 24},
{"VersionScopeNone", Const, 24},
{"VersionScopeSpecific", Const, 24},
},
"debug/gosym": {
{"(*DecodingError).Error", Method, 0},
@@ -4453,8 +4580,10 @@ var PackageSymbols = map[string][]Symbol{
{"FS", Type, 16},
},
"encoding": {
{"BinaryAppender", Type, 24},
{"BinaryMarshaler", Type, 2},
{"BinaryUnmarshaler", Type, 2},
{"TextAppender", Type, 24},
{"TextMarshaler", Type, 2},
{"TextUnmarshaler", Type, 2},
},
@@ -5984,13 +6113,16 @@ var PackageSymbols = map[string][]Symbol{
{"(*Interface).Complete", Method, 5},
{"(*Interface).Embedded", Method, 5},
{"(*Interface).EmbeddedType", Method, 11},
{"(*Interface).EmbeddedTypes", Method, 24},
{"(*Interface).Empty", Method, 5},
{"(*Interface).ExplicitMethod", Method, 5},
{"(*Interface).ExplicitMethods", Method, 24},
{"(*Interface).IsComparable", Method, 18},
{"(*Interface).IsImplicit", Method, 18},
{"(*Interface).IsMethodSet", Method, 18},
{"(*Interface).MarkImplicit", Method, 18},
{"(*Interface).Method", Method, 5},
{"(*Interface).Methods", Method, 24},
{"(*Interface).NumEmbeddeds", Method, 5},
{"(*Interface).NumExplicitMethods", Method, 5},
{"(*Interface).NumMethods", Method, 5},
@@ -6011,9 +6143,11 @@ var PackageSymbols = map[string][]Symbol{
{"(*MethodSet).At", Method, 5},
{"(*MethodSet).Len", Method, 5},
{"(*MethodSet).Lookup", Method, 5},
{"(*MethodSet).Methods", Method, 24},
{"(*MethodSet).String", Method, 5},
{"(*Named).AddMethod", Method, 5},
{"(*Named).Method", Method, 5},
{"(*Named).Methods", Method, 24},
{"(*Named).NumMethods", Method, 5},
{"(*Named).Obj", Method, 5},
{"(*Named).Origin", Method, 18},
@@ -6054,6 +6188,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Pointer).String", Method, 5},
{"(*Pointer).Underlying", Method, 5},
{"(*Scope).Child", Method, 5},
{"(*Scope).Children", Method, 24},
{"(*Scope).Contains", Method, 5},
{"(*Scope).End", Method, 5},
{"(*Scope).Innermost", Method, 5},
@@ -6089,6 +6224,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*StdSizes).Offsetsof", Method, 5},
{"(*StdSizes).Sizeof", Method, 5},
{"(*Struct).Field", Method, 5},
{"(*Struct).Fields", Method, 24},
{"(*Struct).NumFields", Method, 5},
{"(*Struct).String", Method, 5},
{"(*Struct).Tag", Method, 5},
@@ -6100,8 +6236,10 @@ var PackageSymbols = map[string][]Symbol{
{"(*Tuple).Len", Method, 5},
{"(*Tuple).String", Method, 5},
{"(*Tuple).Underlying", Method, 5},
{"(*Tuple).Variables", Method, 24},
{"(*TypeList).At", Method, 18},
{"(*TypeList).Len", Method, 18},
{"(*TypeList).Types", Method, 24},
{"(*TypeName).Exported", Method, 5},
{"(*TypeName).Id", Method, 5},
{"(*TypeName).IsAlias", Method, 9},
@@ -6119,9 +6257,11 @@ var PackageSymbols = map[string][]Symbol{
{"(*TypeParam).Underlying", Method, 18},
{"(*TypeParamList).At", Method, 18},
{"(*TypeParamList).Len", Method, 18},
{"(*TypeParamList).TypeParams", Method, 24},
{"(*Union).Len", Method, 18},
{"(*Union).String", Method, 18},
{"(*Union).Term", Method, 18},
{"(*Union).Terms", Method, 24},
{"(*Union).Underlying", Method, 18},
{"(*Var).Anonymous", Method, 5},
{"(*Var).Embedded", Method, 11},
@@ -6392,10 +6532,12 @@ var PackageSymbols = map[string][]Symbol{
{"(*Hash).WriteByte", Method, 14},
{"(*Hash).WriteString", Method, 14},
{"Bytes", Func, 19},
{"Comparable", Func, 24},
{"Hash", Type, 14},
{"MakeSeed", Func, 14},
{"Seed", Type, 14},
{"String", Func, 19},
{"WriteComparable", Func, 24},
},
"html": {
{"EscapeString", Func, 0},
@@ -7082,6 +7224,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*JSONHandler).WithGroup", Method, 21},
{"(*Level).UnmarshalJSON", Method, 21},
{"(*Level).UnmarshalText", Method, 21},
{"(*LevelVar).AppendText", Method, 24},
{"(*LevelVar).Level", Method, 21},
{"(*LevelVar).MarshalText", Method, 21},
{"(*LevelVar).Set", Method, 21},
@@ -7110,6 +7253,7 @@ var PackageSymbols = map[string][]Symbol{
{"(Attr).Equal", Method, 21},
{"(Attr).String", Method, 21},
{"(Kind).String", Method, 21},
{"(Level).AppendText", Method, 24},
{"(Level).Level", Method, 21},
{"(Level).MarshalJSON", Method, 21},
{"(Level).MarshalText", Method, 21},
@@ -7140,6 +7284,7 @@ var PackageSymbols = map[string][]Symbol{
{"Debug", Func, 21},
{"DebugContext", Func, 21},
{"Default", Func, 21},
{"DiscardHandler", Var, 24},
{"Duration", Func, 21},
{"DurationValue", Func, 21},
{"Error", Func, 21},
@@ -7375,6 +7520,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Float).Acc", Method, 5},
{"(*Float).Add", Method, 5},
{"(*Float).Append", Method, 5},
{"(*Float).AppendText", Method, 24},
{"(*Float).Cmp", Method, 5},
{"(*Float).Copy", Method, 5},
{"(*Float).Float32", Method, 5},
@@ -7421,6 +7567,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Int).And", Method, 0},
{"(*Int).AndNot", Method, 0},
{"(*Int).Append", Method, 6},
{"(*Int).AppendText", Method, 24},
{"(*Int).Binomial", Method, 0},
{"(*Int).Bit", Method, 0},
{"(*Int).BitLen", Method, 0},
@@ -7477,6 +7624,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Int).Xor", Method, 0},
{"(*Rat).Abs", Method, 0},
{"(*Rat).Add", Method, 0},
{"(*Rat).AppendText", Method, 24},
{"(*Rat).Cmp", Method, 0},
{"(*Rat).Denom", Method, 0},
{"(*Rat).Float32", Method, 4},
@@ -7659,11 +7807,13 @@ var PackageSymbols = map[string][]Symbol{
{"Zipf", Type, 0},
},
"math/rand/v2": {
{"(*ChaCha8).AppendBinary", Method, 24},
{"(*ChaCha8).MarshalBinary", Method, 22},
{"(*ChaCha8).Read", Method, 23},
{"(*ChaCha8).Seed", Method, 22},
{"(*ChaCha8).Uint64", Method, 22},
{"(*ChaCha8).UnmarshalBinary", Method, 22},
{"(*PCG).AppendBinary", Method, 24},
{"(*PCG).MarshalBinary", Method, 22},
{"(*PCG).Seed", Method, 22},
{"(*PCG).Uint64", Method, 22},
@@ -7931,6 +8081,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*UnixListener).SyscallConn", Method, 10},
{"(Flags).String", Method, 0},
{"(HardwareAddr).String", Method, 0},
{"(IP).AppendText", Method, 24},
{"(IP).DefaultMask", Method, 0},
{"(IP).Equal", Method, 0},
{"(IP).IsGlobalUnicast", Method, 0},
@@ -8131,6 +8282,9 @@ var PackageSymbols = map[string][]Symbol{
{"(*MaxBytesError).Error", Method, 19},
{"(*ProtocolError).Error", Method, 0},
{"(*ProtocolError).Is", Method, 21},
{"(*Protocols).SetHTTP1", Method, 24},
{"(*Protocols).SetHTTP2", Method, 24},
{"(*Protocols).SetUnencryptedHTTP2", Method, 24},
{"(*Request).AddCookie", Method, 0},
{"(*Request).BasicAuth", Method, 4},
{"(*Request).Clone", Method, 13},
@@ -8190,6 +8344,10 @@ var PackageSymbols = map[string][]Symbol{
{"(Header).Values", Method, 14},
{"(Header).Write", Method, 0},
{"(Header).WriteSubset", Method, 0},
{"(Protocols).HTTP1", Method, 24},
{"(Protocols).HTTP2", Method, 24},
{"(Protocols).String", Method, 24},
{"(Protocols).UnencryptedHTTP2", Method, 24},
{"AllowQuerySemicolons", Func, 17},
{"CanonicalHeaderKey", Func, 0},
{"Client", Type, 0},
@@ -8252,6 +8410,18 @@ var PackageSymbols = map[string][]Symbol{
{"FileSystem", Type, 0},
{"Flusher", Type, 0},
{"Get", Func, 0},
{"HTTP2Config", Type, 24},
{"HTTP2Config.CountError", Field, 24},
{"HTTP2Config.MaxConcurrentStreams", Field, 24},
{"HTTP2Config.MaxDecoderHeaderTableSize", Field, 24},
{"HTTP2Config.MaxEncoderHeaderTableSize", Field, 24},
{"HTTP2Config.MaxReadFrameSize", Field, 24},
{"HTTP2Config.MaxReceiveBufferPerConnection", Field, 24},
{"HTTP2Config.MaxReceiveBufferPerStream", Field, 24},
{"HTTP2Config.PermitProhibitedCipherSuites", Field, 24},
{"HTTP2Config.PingTimeout", Field, 24},
{"HTTP2Config.SendPingTimeout", Field, 24},
{"HTTP2Config.WriteByteTimeout", Field, 24},
{"Handle", Func, 0},
{"HandleFunc", Func, 0},
{"Handler", Type, 0},
@@ -8292,6 +8462,7 @@ var PackageSymbols = map[string][]Symbol{
{"PostForm", Func, 0},
{"ProtocolError", Type, 0},
{"ProtocolError.ErrorString", Field, 0},
{"Protocols", Type, 24},
{"ProxyFromEnvironment", Func, 0},
{"ProxyURL", Func, 0},
{"PushOptions", Type, 8},
@@ -8361,9 +8532,11 @@ var PackageSymbols = map[string][]Symbol{
{"Server.ConnState", Field, 3},
{"Server.DisableGeneralOptionsHandler", Field, 20},
{"Server.ErrorLog", Field, 3},
{"Server.HTTP2", Field, 24},
{"Server.Handler", Field, 0},
{"Server.IdleTimeout", Field, 8},
{"Server.MaxHeaderBytes", Field, 0},
{"Server.Protocols", Field, 24},
{"Server.ReadHeaderTimeout", Field, 8},
{"Server.ReadTimeout", Field, 0},
{"Server.TLSConfig", Field, 0},
@@ -8453,12 +8626,14 @@ var PackageSymbols = map[string][]Symbol{
{"Transport.ExpectContinueTimeout", Field, 6},
{"Transport.ForceAttemptHTTP2", Field, 13},
{"Transport.GetProxyConnectHeader", Field, 16},
{"Transport.HTTP2", Field, 24},
{"Transport.IdleConnTimeout", Field, 7},
{"Transport.MaxConnsPerHost", Field, 11},
{"Transport.MaxIdleConns", Field, 7},
{"Transport.MaxIdleConnsPerHost", Field, 0},
{"Transport.MaxResponseHeaderBytes", Field, 7},
{"Transport.OnProxyConnectResponse", Field, 20},
{"Transport.Protocols", Field, 24},
{"Transport.Proxy", Field, 0},
{"Transport.ProxyConnectHeader", Field, 8},
{"Transport.ReadBufferSize", Field, 13},
@@ -8646,6 +8821,8 @@ var PackageSymbols = map[string][]Symbol{
{"(*AddrPort).UnmarshalText", Method, 18},
{"(*Prefix).UnmarshalBinary", Method, 18},
{"(*Prefix).UnmarshalText", Method, 18},
{"(Addr).AppendBinary", Method, 24},
{"(Addr).AppendText", Method, 24},
{"(Addr).AppendTo", Method, 18},
{"(Addr).As16", Method, 18},
{"(Addr).As4", Method, 18},
@@ -8676,6 +8853,8 @@ var PackageSymbols = map[string][]Symbol{
{"(Addr).WithZone", Method, 18},
{"(Addr).Zone", Method, 18},
{"(AddrPort).Addr", Method, 18},
{"(AddrPort).AppendBinary", Method, 24},
{"(AddrPort).AppendText", Method, 24},
{"(AddrPort).AppendTo", Method, 18},
{"(AddrPort).Compare", Method, 22},
{"(AddrPort).IsValid", Method, 18},
@@ -8684,6 +8863,8 @@ var PackageSymbols = map[string][]Symbol{
{"(AddrPort).Port", Method, 18},
{"(AddrPort).String", Method, 18},
{"(Prefix).Addr", Method, 18},
{"(Prefix).AppendBinary", Method, 24},
{"(Prefix).AppendText", Method, 24},
{"(Prefix).AppendTo", Method, 18},
{"(Prefix).Bits", Method, 18},
{"(Prefix).Contains", Method, 18},
@@ -8868,6 +9049,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Error).Temporary", Method, 6},
{"(*Error).Timeout", Method, 6},
{"(*Error).Unwrap", Method, 13},
{"(*URL).AppendBinary", Method, 24},
{"(*URL).EscapedFragment", Method, 15},
{"(*URL).EscapedPath", Method, 5},
{"(*URL).Hostname", Method, 8},
@@ -8967,6 +9149,17 @@ var PackageSymbols = map[string][]Symbol{
{"(*ProcessState).SysUsage", Method, 0},
{"(*ProcessState).SystemTime", Method, 0},
{"(*ProcessState).UserTime", Method, 0},
{"(*Root).Close", Method, 24},
{"(*Root).Create", Method, 24},
{"(*Root).FS", Method, 24},
{"(*Root).Lstat", Method, 24},
{"(*Root).Mkdir", Method, 24},
{"(*Root).Name", Method, 24},
{"(*Root).Open", Method, 24},
{"(*Root).OpenFile", Method, 24},
{"(*Root).OpenRoot", Method, 24},
{"(*Root).Remove", Method, 24},
{"(*Root).Stat", Method, 24},
{"(*SyscallError).Error", Method, 0},
{"(*SyscallError).Timeout", Method, 10},
{"(*SyscallError).Unwrap", Method, 13},
@@ -9060,6 +9253,8 @@ var PackageSymbols = map[string][]Symbol{
{"O_WRONLY", Const, 0},
{"Open", Func, 0},
{"OpenFile", Func, 0},
{"OpenInRoot", Func, 24},
{"OpenRoot", Func, 24},
{"PathError", Type, 0},
{"PathError.Err", Field, 0},
{"PathError.Op", Field, 0},
@@ -9081,6 +9276,7 @@ var PackageSymbols = map[string][]Symbol{
{"Remove", Func, 0},
{"RemoveAll", Func, 0},
{"Rename", Func, 0},
{"Root", Type, 24},
{"SEEK_CUR", Const, 0},
{"SEEK_END", Const, 0},
{"SEEK_SET", Const, 0},
@@ -9422,6 +9618,7 @@ var PackageSymbols = map[string][]Symbol{
{"Zero", Func, 0},
},
"regexp": {
{"(*Regexp).AppendText", Method, 24},
{"(*Regexp).Copy", Method, 6},
{"(*Regexp).Expand", Method, 0},
{"(*Regexp).ExpandString", Method, 0},
@@ -9602,6 +9799,8 @@ var PackageSymbols = map[string][]Symbol{
{"(*StackRecord).Stack", Method, 0},
{"(*TypeAssertionError).Error", Method, 0},
{"(*TypeAssertionError).RuntimeError", Method, 0},
{"(Cleanup).Stop", Method, 24},
{"AddCleanup", Func, 24},
{"BlockProfile", Func, 1},
{"BlockProfileRecord", Type, 1},
{"BlockProfileRecord.Count", Field, 1},
@@ -9612,6 +9811,7 @@ var PackageSymbols = map[string][]Symbol{
{"Caller", Func, 0},
{"Callers", Func, 0},
{"CallersFrames", Func, 7},
{"Cleanup", Type, 24},
{"Compiler", Const, 0},
{"Error", Type, 0},
{"Frame", Type, 7},
@@ -9974,6 +10174,8 @@ var PackageSymbols = map[string][]Symbol{
{"EqualFold", Func, 0},
{"Fields", Func, 0},
{"FieldsFunc", Func, 0},
{"FieldsFuncSeq", Func, 24},
{"FieldsSeq", Func, 24},
{"HasPrefix", Func, 0},
{"HasSuffix", Func, 0},
{"Index", Func, 0},
@@ -9986,6 +10188,7 @@ var PackageSymbols = map[string][]Symbol{
{"LastIndexAny", Func, 0},
{"LastIndexByte", Func, 5},
{"LastIndexFunc", Func, 0},
{"Lines", Func, 24},
{"Map", Func, 0},
{"NewReader", Func, 0},
{"NewReplacer", Func, 0},
@@ -9997,7 +10200,9 @@ var PackageSymbols = map[string][]Symbol{
{"Split", Func, 0},
{"SplitAfter", Func, 0},
{"SplitAfterN", Func, 0},
{"SplitAfterSeq", Func, 24},
{"SplitN", Func, 0},
{"SplitSeq", Func, 24},
{"Title", Func, 0},
{"ToLower", Func, 0},
{"ToLowerSpecial", Func, 0},
@@ -16413,7 +16618,9 @@ var PackageSymbols = map[string][]Symbol{
{"ValueOf", Func, 0},
},
"testing": {
{"(*B).Chdir", Method, 24},
{"(*B).Cleanup", Method, 14},
{"(*B).Context", Method, 24},
{"(*B).Elapsed", Method, 20},
{"(*B).Error", Method, 0},
{"(*B).Errorf", Method, 0},
@@ -16425,6 +16632,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*B).Helper", Method, 9},
{"(*B).Log", Method, 0},
{"(*B).Logf", Method, 0},
{"(*B).Loop", Method, 24},
{"(*B).Name", Method, 8},
{"(*B).ReportAllocs", Method, 1},
{"(*B).ReportMetric", Method, 13},
@@ -16442,7 +16650,9 @@ var PackageSymbols = map[string][]Symbol{
{"(*B).StopTimer", Method, 0},
{"(*B).TempDir", Method, 15},
{"(*F).Add", Method, 18},
{"(*F).Chdir", Method, 24},
{"(*F).Cleanup", Method, 18},
{"(*F).Context", Method, 24},
{"(*F).Error", Method, 18},
{"(*F).Errorf", Method, 18},
{"(*F).Fail", Method, 18},
@@ -16463,7 +16673,9 @@ var PackageSymbols = map[string][]Symbol{
{"(*F).TempDir", Method, 18},
{"(*M).Run", Method, 4},
{"(*PB).Next", Method, 3},
{"(*T).Chdir", Method, 24},
{"(*T).Cleanup", Method, 14},
{"(*T).Context", Method, 24},
{"(*T).Deadline", Method, 15},
{"(*T).Error", Method, 0},
{"(*T).Errorf", Method, 0},
@@ -16954,7 +17166,9 @@ var PackageSymbols = map[string][]Symbol{
{"(Time).Add", Method, 0},
{"(Time).AddDate", Method, 0},
{"(Time).After", Method, 0},
{"(Time).AppendBinary", Method, 24},
{"(Time).AppendFormat", Method, 5},
{"(Time).AppendText", Method, 24},
{"(Time).Before", Method, 0},
{"(Time).Clock", Method, 0},
{"(Time).Compare", Method, 20},
@@ -17428,4 +17642,9 @@ var PackageSymbols = map[string][]Symbol{
{"String", Func, 0},
{"StringData", Func, 0},
},
"weak": {
{"(Pointer).Value", Method, 24},
{"Make", Func, 24},
{"Pointer", Type, 24},
},
}
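
The bulk of this file is the regenerated standard-library API manifest, where the trailing number is the minor Go version that introduced the symbol (24 = go1.24). A query sketch (illustrative, not from the diff), assuming the internal stdlib package exposes a Symbol type with Name, Kind, and Version fields as the literals above suggest:

package main

import (
	"fmt"

	"golang.org/x/tools/internal/stdlib" // internal: usable only within x/tools
)

func main() {
	// List the os package API that first appeared in go1.24,
	// e.g. the new Root type and its methods.
	for _, sym := range stdlib.PackageSymbols["os"] {
		if sym.Version == 24 {
			fmt.Printf("os: %s (%v)\n", sym.Name, sym.Kind)
		}
	}
}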

View File

@@ -66,75 +66,3 @@ func IsTypeParam(t types.Type) bool {
_, ok := types.Unalias(t).(*types.TypeParam)
return ok
}
// GenericAssignableTo is a generalization of types.AssignableTo that
// implements the following rule for uninstantiated generic types:
//
// If V and T are generic named types, then V is considered assignable to T if,
// for every possible instantiation of V[A_1, ..., A_N], the instantiation
// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
//
// If T has structural constraints, they must be satisfied by V.
//
// For example, consider the following type declarations:
//
// type Interface[T any] interface {
// Accept(T)
// }
//
// type Container[T any] struct {
// Element T
// }
//
// func (c Container[T]) Accept(t T) { c.Element = t }
//
// In this case, GenericAssignableTo reports that instantiations of Container
// are assignable to the corresponding instantiation of Interface.
func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool {
V = types.Unalias(V)
T = types.Unalias(T)
// If V and T are not both named, or do not have matching non-empty type
// parameter lists, fall back on types.AssignableTo.
VN, Vnamed := V.(*types.Named)
TN, Tnamed := T.(*types.Named)
if !Vnamed || !Tnamed {
return types.AssignableTo(V, T)
}
vtparams := VN.TypeParams()
ttparams := TN.TypeParams()
if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || VN.TypeArgs().Len() != 0 || TN.TypeArgs().Len() != 0 {
return types.AssignableTo(V, T)
}
// V and T have the same (non-zero) number of type params. Instantiate both
// with the type parameters of V. This must always succeed for V, and will
// succeed for T if and only if the type set of each type parameter of V is a
// subset of the type set of the corresponding type parameter of T, meaning
// that every instantiation of V corresponds to a valid instantiation of T.
// Minor optimization: ensure we share a context across the two
// instantiations below.
if ctxt == nil {
ctxt = types.NewContext()
}
var targs []types.Type
for i := 0; i < vtparams.Len(); i++ {
targs = append(targs, vtparams.At(i))
}
vinst, err := types.Instantiate(ctxt, V, targs, true)
if err != nil {
panic("type parameters should satisfy their own constraints")
}
tinst, err := types.Instantiate(ctxt, T, targs, true)
if err != nil {
return false
}
return types.AssignableTo(vinst, tinst)
}
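
GenericAssignableTo is removed by this change, but the rule its doc comment describes can be made concrete. A hedged sketch that type-checks the Container/Interface pair from the comment above and asks whether the uninstantiated generics are assignable; the internal import and names are assumptions, shown only to illustrate the removed behavior.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams" // where the helper lived
)

const src = `package p

type Interface[T any] interface{ Accept(T) }

type Container[T any] struct{ Element T }

func (c Container[T]) Accept(t T) { c.Element = t }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	V := pkg.Scope().Lookup("Container").Type()
	T := pkg.Scope().Lookup("Interface").Type()
	// Instantiating both with Container's own type parameters shows
	// that every Container[X] implements Interface[X].
	fmt.Println(typeparams.GenericAssignableTo(nil, V, T)) // true
}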

View File

@@ -0,0 +1,46 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typesinternal
import (
"go/ast"
"go/types"
"strconv"
)
// FileQualifier returns a [types.Qualifier] function that qualifies
// imported symbols appropriately based on the import environment of a given
// file.
// If the same package is imported multiple times, the last appearance is
// recorded.
func FileQualifier(f *ast.File, pkg *types.Package) types.Qualifier {
// Construct mapping of import paths to their defined names.
// It is only necessary to look at renaming imports.
imports := make(map[string]string)
for _, imp := range f.Imports {
if imp.Name != nil && imp.Name.Name != "_" {
path, _ := strconv.Unquote(imp.Path.Value)
imports[path] = imp.Name.Name
}
}
// Define qualifier to replace full package paths with names of the imports.
return func(p *types.Package) string {
if p == nil || p == pkg {
return ""
}
if name, ok := imports[p.Path()]; ok {
if name == "." {
return ""
} else {
return name
}
}
// If there is no local renaming, fall back to the package name.
return p.Name()
}
}
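
A usage sketch for the new FileQualifier (illustrative, not from the diff): only renaming imports are recorded, so a package imported under a local name is printed with that name, and anything else falls back to the package's own name. The source text and package objects below are illustrative.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typesinternal" // internal: illustrative only
)

func main() {
	src := `package p

import foo "example.com/bar"

var _ foo.T
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	self := types.NewPackage("p", "p")
	bar := types.NewPackage("example.com/bar", "bar")
	qual := typesinternal.FileQualifier(f, self)
	fmt.Println(qual(bar))  // "foo": the file's renaming import wins over "bar"
	fmt.Println(qual(self)) // "": the current package is never qualified
}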

View File

@@ -11,6 +11,8 @@ import (
// ReceiverNamed returns the named type (if any) associated with the
// type of recv, which may be of the form N or *N, or aliases thereof.
// It also reports whether a Pointer was present.
//
// The named result may be nil in ill-typed code.
func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) {
t := recv.Type()
if ptr, ok := types.Unalias(t).(*types.Pointer); ok {

View File

@@ -82,6 +82,7 @@ func NameRelativeTo(pkg *types.Package) types.Qualifier {
type NamedOrAlias interface {
types.Type
Obj() *types.TypeName
// TODO(hxjiang): add method TypeArgs() *types.TypeList after stop supporting go1.22.
}
// TypeParams is a light shim around t.TypeParams().

View File

@@ -9,62 +9,97 @@ import (
"go/ast"
"go/token"
"go/types"
"strconv"
"strings"
)
// ZeroString returns the string representation of the "zero" value of the type t.
// This string can be used on the right-hand side of an assignment where the
// left-hand side has that explicit type.
// Exception: This does not apply to tuples. Their string representation is
// informational only and cannot be used in an assignment.
// ZeroString returns the string representation of the zero value for any type t.
// The boolean result indicates whether the type is or contains an invalid type
// or a non-basic (constraint) interface type.
//
// Even for invalid input types, ZeroString may return a partially correct
// string representation. The caller should use the returned isValid boolean
// to determine the validity of the expression.
//
// When assigning to a wider type (such as 'any'), it's the caller's
// responsibility to handle any necessary type conversions.
//
// This string can be used on the right-hand side of an assignment where the
// left-hand side has that explicit type.
// References to named types are qualified by an appropriate (optional)
// qualifier function.
// Exception: This does not apply to tuples. Their string representation is
// informational only and cannot be used in an assignment.
//
// See [ZeroExpr] for a variant that returns an [ast.Expr].
func ZeroString(t types.Type, qf types.Qualifier) string {
func ZeroString(t types.Type, qual types.Qualifier) (_ string, isValid bool) {
switch t := t.(type) {
case *types.Basic:
switch {
case t.Info()&types.IsBoolean != 0:
return "false"
return "false", true
case t.Info()&types.IsNumeric != 0:
return "0"
return "0", true
case t.Info()&types.IsString != 0:
return `""`
return `""`, true
case t.Kind() == types.UnsafePointer:
fallthrough
case t.Kind() == types.UntypedNil:
return "nil"
return "nil", true
case t.Kind() == types.Invalid:
return "invalid", false
default:
panic(fmt.Sprint("ZeroString for unexpected type:", t))
panic(fmt.Sprintf("ZeroString for unexpected type %v", t))
}
case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
return "nil"
case *types.Named, *types.Alias:
case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
return "nil", true
case *types.Interface:
if !t.IsMethodSet() {
return "invalid", false
}
return "nil", true
case *types.Named:
switch under := t.Underlying().(type) {
case *types.Struct, *types.Array:
return types.TypeString(t, qf) + "{}"
return types.TypeString(t, qual) + "{}", true
default:
return ZeroString(under, qf)
return ZeroString(under, qual)
}
case *types.Alias:
switch t.Underlying().(type) {
case *types.Struct, *types.Array:
return types.TypeString(t, qual) + "{}", true
default:
// A type parameter can have alias but alias type's underlying type
// can never be a type parameter.
// Use types.Unalias to preserve the info of type parameter instead
// of call Underlying() going right through and get the underlying
// type of the type parameter which is always an interface.
return ZeroString(types.Unalias(t), qual)
} }
case *types.Array, *types.Struct:
return types.TypeString(t, qf) + "{}"
return types.TypeString(t, qual) + "{}", true
case *types.TypeParam:
// Assumes func new is not shadowed.
return "*new(" + types.TypeString(t, qf) + ")"
return "*new(" + types.TypeString(t, qual) + ")", true
case *types.Tuple:
// Tuples are not normal values.
// We are currently format as "(t[0], ..., t[n])". Could be something else.
isValid := true
components := make([]string, t.Len())
for i := 0; i < t.Len(); i++ {
components[i] = ZeroString(t.At(i).Type(), qf)
comp, ok := ZeroString(t.At(i).Type(), qual)
components[i] = comp
isValid = isValid && ok
}
return "(" + strings.Join(components, ", ") + ")"
return "(" + strings.Join(components, ", ") + ")", isValid
case *types.Union:
// Variables of these types cannot be created, so it makes
@@ -76,45 +111,72 @@ func ZeroString(t types.Type, qf types.Qualifier) string {
}
}
// ZeroExpr returns the ast.Expr representation of the "zero" value of the type t.
// ZeroExpr is defined for types that are suitable for variables.
// It may panic for other types such as Tuple or Union.
// ZeroExpr returns the ast.Expr representation of the zero value for any type t.
// The boolean result indicates whether the type is or contains an invalid type
// or a non-basic (constraint) interface type.
//
// Even for invalid input types, ZeroExpr may return a partially correct ast.Expr
// representation. The caller should use the returned isValid boolean to determine
// the validity of the expression.
//
// This function is designed for types suitable for variables and should not be
// used with Tuple or Union types. References to named types are qualified by an
// appropriate (optional) qualifier function.
//
// See [ZeroString] for a variant that returns a string. // See [ZeroString] for a variant that returns a string.
func ZeroExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { func ZeroExpr(t types.Type, qual types.Qualifier) (_ ast.Expr, isValid bool) {
switch t := typ.(type) { switch t := t.(type) {
case *types.Basic: case *types.Basic:
switch { switch {
case t.Info()&types.IsBoolean != 0: case t.Info()&types.IsBoolean != 0:
return &ast.Ident{Name: "false"} return &ast.Ident{Name: "false"}, true
case t.Info()&types.IsNumeric != 0: case t.Info()&types.IsNumeric != 0:
return &ast.BasicLit{Kind: token.INT, Value: "0"} return &ast.BasicLit{Kind: token.INT, Value: "0"}, true
case t.Info()&types.IsString != 0: case t.Info()&types.IsString != 0:
return &ast.BasicLit{Kind: token.STRING, Value: `""`} return &ast.BasicLit{Kind: token.STRING, Value: `""`}, true
case t.Kind() == types.UnsafePointer: case t.Kind() == types.UnsafePointer:
fallthrough fallthrough
case t.Kind() == types.UntypedNil: case t.Kind() == types.UntypedNil:
return ast.NewIdent("nil") return ast.NewIdent("nil"), true
case t.Kind() == types.Invalid:
return &ast.BasicLit{Kind: token.STRING, Value: `"invalid"`}, false
default: default:
panic(fmt.Sprint("ZeroExpr for unexpected type:", t)) panic(fmt.Sprintf("ZeroExpr for unexpected type %v", t))
} }
case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
return ast.NewIdent("nil") return ast.NewIdent("nil"), true
case *types.Named, *types.Alias: case *types.Interface:
if !t.IsMethodSet() {
return &ast.BasicLit{Kind: token.STRING, Value: `"invalid"`}, false
}
return ast.NewIdent("nil"), true
case *types.Named:
switch under := t.Underlying().(type) { switch under := t.Underlying().(type) {
case *types.Struct, *types.Array: case *types.Struct, *types.Array:
return &ast.CompositeLit{ return &ast.CompositeLit{
Type: TypeExpr(f, pkg, typ), Type: TypeExpr(t, qual),
} }, true
default: default:
return ZeroExpr(f, pkg, under) return ZeroExpr(under, qual)
}
case *types.Alias:
switch t.Underlying().(type) {
case *types.Struct, *types.Array:
return &ast.CompositeLit{
Type: TypeExpr(t, qual),
}, true
default:
return ZeroExpr(types.Unalias(t), qual)
} }
case *types.Array, *types.Struct: case *types.Array, *types.Struct:
return &ast.CompositeLit{ return &ast.CompositeLit{
Type: TypeExpr(f, pkg, typ), Type: TypeExpr(t, qual),
} }, true
case *types.TypeParam: case *types.TypeParam:
return &ast.StarExpr{ // *new(T) return &ast.StarExpr{ // *new(T)
@ -125,7 +187,7 @@ func ZeroExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
ast.NewIdent(t.Obj().Name()), ast.NewIdent(t.Obj().Name()),
}, },
}, },
} }, true
case *types.Tuple: case *types.Tuple:
// Unlike ZeroString, there is no ast.Expr can express tuple by // Unlike ZeroString, there is no ast.Expr can express tuple by
@ -157,16 +219,14 @@ func IsZeroExpr(expr ast.Expr) bool {
} }
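A similar sketch for the reworked ZeroExpr, rendering the zero value of a named struct type as syntax. The same caveat applies: typesinternal is internal to golang.org/x/tools, and the type name below is made up for illustration.

```go
package main

import (
	"fmt"
	"go/printer"
	"go/token"
	"go/types"
	"os"

	// Internal package: only importable from within golang.org/x/tools.
	"golang.org/x/tools/internal/typesinternal"
)

func main() {
	pkg := types.NewPackage("example.com/p", "p")
	// type Config struct{} — a named type with a struct underlying type.
	named := types.NewNamed(
		types.NewTypeName(token.NoPos, pkg, "Config", nil),
		types.NewStruct(nil, nil),
		nil,
	)

	expr, ok := typesinternal.ZeroExpr(named, types.RelativeTo(pkg))
	_ = ok // false would flag an invalid or constraint-interface type
	printer.Fprint(os.Stdout, token.NewFileSet(), expr) // Config{}
	fmt.Println()
}
```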
 // TypeExpr returns syntax for the specified type. References to named types
-// from packages other than pkg are qualified by an appropriate package name, as
-// defined by the import environment of file.
+// are qualified by an appropriate (optional) qualifier function.
 // It may panic for types such as Tuple or Union.
-func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
-	switch t := typ.(type) {
+func TypeExpr(t types.Type, qual types.Qualifier) ast.Expr {
+	switch t := t.(type) {
 	case *types.Basic:
 		switch t.Kind() {
 		case types.UnsafePointer:
-			// TODO(hxjiang): replace the implementation with types.Qualifier.
-			return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")}
+			return &ast.SelectorExpr{X: ast.NewIdent(qual(types.NewPackage("unsafe", "unsafe"))), Sel: ast.NewIdent("Pointer")}
 		default:
 			return ast.NewIdent(t.Name())
 		}
@@ -174,7 +234,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	case *types.Pointer:
 		return &ast.UnaryExpr{
 			Op: token.MUL,
-			X:  TypeExpr(f, pkg, t.Elem()),
+			X:  TypeExpr(t.Elem(), qual),
 		}
@@ -183,18 +243,18 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 				Kind:  token.INT,
 				Value: fmt.Sprintf("%d", t.Len()),
 			},
-			Elt: TypeExpr(f, pkg, t.Elem()),
+			Elt: TypeExpr(t.Elem(), qual),
 		}
 	case *types.Slice:
 		return &ast.ArrayType{
-			Elt: TypeExpr(f, pkg, t.Elem()),
+			Elt: TypeExpr(t.Elem(), qual),
 		}
 	case *types.Map:
 		return &ast.MapType{
-			Key:   TypeExpr(f, pkg, t.Key()),
-			Value: TypeExpr(f, pkg, t.Elem()),
+			Key:   TypeExpr(t.Key(), qual),
+			Value: TypeExpr(t.Elem(), qual),
 		}
 	case *types.Chan:
@@ -204,14 +264,14 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		}
 		return &ast.ChanType{
 			Dir:   dir,
-			Value: TypeExpr(f, pkg, t.Elem()),
+			Value: TypeExpr(t.Elem(), qual),
 		}
 	case *types.Signature:
 		var params []*ast.Field
 		for i := 0; i < t.Params().Len(); i++ {
 			params = append(params, &ast.Field{
-				Type: TypeExpr(f, pkg, t.Params().At(i).Type()),
+				Type: TypeExpr(t.Params().At(i).Type(), qual),
 				Names: []*ast.Ident{
 					{
 						Name: t.Params().At(i).Name(),
@@ -226,7 +286,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		var returns []*ast.Field
 		for i := 0; i < t.Results().Len(); i++ {
 			returns = append(returns, &ast.Field{
-				Type: TypeExpr(f, pkg, t.Results().At(i).Type()),
+				Type: TypeExpr(t.Results().At(i).Type(), qual),
 			})
 		}
 		return &ast.FuncType{
@@ -238,23 +298,9 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 			},
 		}
-	case interface{ Obj() *types.TypeName }: // *types.{Alias,Named,TypeParam}
-		switch t.Obj().Pkg() {
-		case pkg, nil:
-			return ast.NewIdent(t.Obj().Name())
-		}
-		pkgName := t.Obj().Pkg().Name()
-		// TODO(hxjiang): replace the implementation with types.Qualifier.
-		// If the file already imports the package under another name, use that.
-		for _, cand := range f.Imports {
-			if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() {
-				if cand.Name != nil && cand.Name.Name != "" {
-					pkgName = cand.Name.Name
-				}
-			}
-		}
-		if pkgName == "." {
+	case *types.TypeParam:
+		pkgName := qual(t.Obj().Pkg())
+		if pkgName == "" || t.Obj().Pkg() == nil {
 			return ast.NewIdent(t.Obj().Name())
 		}
 		return &ast.SelectorExpr{
@@ -262,6 +308,36 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 			Sel: ast.NewIdent(t.Obj().Name()),
 		}
+	// types.TypeParam also implements interface NamedOrAlias. To differentiate,
+	// case TypeParam need to be present before case NamedOrAlias.
+	// TODO(hxjiang): remove this comment once TypeArgs() is added to interface
+	// NamedOrAlias.
+	case NamedOrAlias:
+		var expr ast.Expr = ast.NewIdent(t.Obj().Name())
+		if pkgName := qual(t.Obj().Pkg()); pkgName != "." && pkgName != "" {
+			expr = &ast.SelectorExpr{
+				X:   ast.NewIdent(pkgName),
+				Sel: expr.(*ast.Ident),
+			}
+		}
+		// TODO(hxjiang): call t.TypeArgs after adding method TypeArgs() to
+		// typesinternal.NamedOrAlias.
+		if hasTypeArgs, ok := t.(interface{ TypeArgs() *types.TypeList }); ok {
+			if typeArgs := hasTypeArgs.TypeArgs(); typeArgs != nil && typeArgs.Len() > 0 {
+				var indices []ast.Expr
+				for i := range typeArgs.Len() {
+					indices = append(indices, TypeExpr(typeArgs.At(i), qual))
+				}
+				expr = &ast.IndexListExpr{
+					X:       expr,
+					Indices: indices,
+				}
+			}
+		}
+		return expr
 	case *types.Struct:
 		return ast.NewIdent(t.String())
@@ -269,9 +345,43 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		return ast.NewIdent(t.String())
 	case *types.Union:
-		// TODO(hxjiang): handle the union through syntax (~A | ... | ~Z).
-		// Remove nil check when calling typesinternal.TypeExpr.
-		return nil
+		if t.Len() == 0 {
+			panic("Union type should have at least one term")
+		}
+		// Same as go/ast, the return expression will put last term in the
+		// Y field at topmost level of BinaryExpr.
+		// For union of type "float32 | float64 | int64", the structure looks
+		// similar to:
+		// {
+		//	X: {
+		//		X:  float32,
+		//		Op: |
+		//		Y:  float64,
+		//	}
+		//	Op: |,
+		//	Y:  int64,
+		// }
+		var union ast.Expr
+		for i := range t.Len() {
+			term := t.Term(i)
+			termExpr := TypeExpr(term.Type(), qual)
+			if term.Tilde() {
+				termExpr = &ast.UnaryExpr{
+					Op: token.TILDE,
+					X:  termExpr,
+				}
+			}
+			if i == 0 {
+				union = termExpr
+			} else {
+				union = &ast.BinaryExpr{
+					X:  union,
+					Op: token.OR,
+					Y:  termExpr,
+				}
+			}
+		}
+		return union
 	case *types.Tuple:
 		panic("invalid input type types.Tuple")
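The comment above describes the BinaryExpr shape only abstractly; the tree for a small union can be written out with plain go/ast (no internal imports needed), which also shows where tilde terms and the final Y operand end up:

```go
package main

import (
	"go/ast"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	// The shape TypeExpr builds for the union ~int | float64.
	union := &ast.BinaryExpr{
		X: &ast.UnaryExpr{ // an approximation term keeps its tilde
			Op: token.TILDE,
			X:  ast.NewIdent("int"),
		},
		Op: token.OR,
		Y:  ast.NewIdent("float64"), // the last term sits in Y at the top level
	}
	printer.Fprint(os.Stdout, token.NewFileSet(), union) // ~int | float64
}
```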


@@ -88,9 +88,7 @@ func opaqueInitHook(mi *MessageInfo) bool {
 		mi.oneofs = map[protoreflect.Name]*oneofInfo{}
 		for i := 0; i < mi.Desc.Oneofs().Len(); i++ {
 			od := mi.Desc.Oneofs().Get(i)
-			if !od.IsSynthetic() {
-				mi.oneofs[od.Name()] = makeOneofInfo(od, si.structInfo, mi.Exporter)
-			}
+			mi.oneofs[od.Name()] = makeOneofInfoOpaque(mi, od, si.structInfo, mi.Exporter)
 		}
 		mi.denseFields = make([]*fieldInfo, fds.Len()*2)
@@ -119,6 +117,26 @@ func opaqueInitHook(mi *MessageInfo) bool {
 	return true
 }
+
+func makeOneofInfoOpaque(mi *MessageInfo, od protoreflect.OneofDescriptor, si structInfo, x exporter) *oneofInfo {
+	oi := &oneofInfo{oneofDesc: od}
+	if od.IsSynthetic() {
+		fd := od.Fields().Get(0)
+		index, _ := presenceIndex(mi.Desc, fd)
+		oi.which = func(p pointer) protoreflect.FieldNumber {
+			if p.IsNil() {
+				return 0
+			}
+			if !mi.present(p, index) {
+				return 0
+			}
+			return od.Fields().Get(0).Number()
+		}
+		return oi
+	}
+	// Dispatch to non-opaque oneof implementation for non-synthetic oneofs.
+	return makeOneofInfo(od, si, x)
+}
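The interesting branch is the synthetic-oneof one: a proto3 optional field is modeled as a one-field oneof, so "which field is set" reduces to a presence-bit check. Below is a schematic, self-contained analogue of that closure; the message struct, presence bitmap, and field numbers are toy stand-ins, not the real protobuf-internal types:

```go
package main

import "fmt"

// message is a toy stand-in for an opaque protobuf message.
type message struct {
	presence uint64 // one bit per field with explicit presence
}

// which mirrors the closure installed for synthetic oneofs: it returns the
// oneof's single field number when the field is present, and 0 otherwise.
func which(m *message, index uint, fieldNumber int32) int32 {
	if m == nil {
		return 0 // the p.IsNil() case
	}
	if m.presence&(1<<index) == 0 {
		return 0 // the !mi.present(p, index) case
	}
	return fieldNumber // od.Fields().Get(0).Number()
}

func main() {
	m := &message{}
	fmt.Println(which(m, 3, 7)) // 0: field absent
	m.presence |= 1 << 3
	fmt.Println(which(m, 3, 7)) // 7: field present
}
```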
 func (mi *MessageInfo) fieldInfoForMapOpaque(si opaqueStructInfo, fd protoreflect.FieldDescriptor, fs reflect.StructField) fieldInfo {
 	ft := fs.Type
 	if ft.Kind() != reflect.Map {


@@ -52,7 +52,7 @@ import (
 const (
 	Major      = 1
 	Minor      = 36
-	Patch      = 1
+	Patch      = 2
 	PreRelease = ""
 )
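The patch-level bump is what downstream tooling sees as the module's reported version. As a simplified sketch (the real internal version package also handles pre-release and development-build suffixes), the constants compose like this:

```go
package main

import "fmt"

const (
	Major      = 1
	Minor      = 36
	Patch      = 2
	PreRelease = ""
)

func main() {
	v := fmt.Sprintf("v%d.%d.%d", Major, Minor, Patch)
	if PreRelease != "" {
		v += "-" + PreRelease // e.g. v1.36.2-rc.1
	}
	fmt.Println(v) // v1.36.2
}
```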

vendor/modules.txt (vendored)

@@ -442,7 +442,7 @@ github.com/moby/sys/user
 # github.com/moby/sys/userns v0.1.0
 ## explicit; go 1.21
 github.com/moby/sys/userns
-# github.com/moby/term v0.5.0
+# github.com/moby/term v0.5.2
 ## explicit; go 1.18
 github.com/moby/term
 github.com/moby/term/windows
@@ -626,15 +626,15 @@ go.opentelemetry.io/otel/sdk/metric/metricdata
 go.opentelemetry.io/otel/trace
 go.opentelemetry.io/otel/trace/embedded
 go.opentelemetry.io/otel/trace/noop
-# go.opentelemetry.io/proto/otlp v1.4.0
-## explicit; go 1.22.7
+# go.opentelemetry.io/proto/otlp v1.5.0
+## explicit; go 1.22.0
 go.opentelemetry.io/proto/otlp/collector/metrics/v1
 go.opentelemetry.io/proto/otlp/collector/trace/v1
 go.opentelemetry.io/proto/otlp/common/v1
 go.opentelemetry.io/proto/otlp/metrics/v1
 go.opentelemetry.io/proto/otlp/resource/v1
 go.opentelemetry.io/proto/otlp/trace/v1
-# golang.org/x/crypto v0.31.0
+# golang.org/x/crypto v0.32.0
 ## explicit; go 1.20
 golang.org/x/crypto/argon2
 golang.org/x/crypto/blake2b
@@ -652,7 +652,7 @@ golang.org/x/crypto/ssh
 golang.org/x/crypto/ssh/agent
 golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
 golang.org/x/crypto/ssh/knownhosts
-# golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329
+# golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8
 ## explicit; go 1.22.0
 golang.org/x/exp/slices
 golang.org/x/exp/slog
@@ -661,7 +661,7 @@ golang.org/x/exp/slog/internal/buffer
 # golang.org/x/mod v0.22.0
 ## explicit; go 1.22.0
 golang.org/x/mod/semver
-# golang.org/x/net v0.33.0
+# golang.org/x/net v0.34.0
 ## explicit; go 1.18
 golang.org/x/net/context
 golang.org/x/net/http/httpguts
@@ -675,7 +675,7 @@ golang.org/x/net/trace
 # golang.org/x/sync v0.10.0
 ## explicit; go 1.18
 golang.org/x/sync/errgroup
-# golang.org/x/sys v0.28.0
+# golang.org/x/sys v0.29.0
 ## explicit; go 1.18
 golang.org/x/sys/cpu
 golang.org/x/sys/execabs
@@ -683,7 +683,7 @@ golang.org/x/sys/plan9
 golang.org/x/sys/unix
 golang.org/x/sys/windows
 golang.org/x/sys/windows/registry
-# golang.org/x/term v0.27.0
+# golang.org/x/term v0.28.0
 ## explicit; go 1.18
 golang.org/x/term
 # golang.org/x/text v0.21.0
@@ -699,10 +699,10 @@ golang.org/x/text/transform
 golang.org/x/text/unicode/bidi
 golang.org/x/text/unicode/norm
 golang.org/x/text/width
-# golang.org/x/time v0.8.0
+# golang.org/x/time v0.9.0
 ## explicit; go 1.18
 golang.org/x/time/rate
-# golang.org/x/tools v0.28.0
+# golang.org/x/tools v0.29.0
 ## explicit; go 1.22.0
 golang.org/x/tools/go/gcexportdata
 golang.org/x/tools/go/packages
@@ -721,10 +721,10 @@ golang.org/x/tools/internal/stdlib
 golang.org/x/tools/internal/typeparams
 golang.org/x/tools/internal/typesinternal
 golang.org/x/tools/internal/versions
-# google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d
+# google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422
 ## explicit; go 1.22
 google.golang.org/genproto/googleapis/api/httpbody
-# google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d
+# google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422
 ## explicit; go 1.22
 google.golang.org/genproto/googleapis/rpc/errdetails
 google.golang.org/genproto/googleapis/rpc/status
@@ -789,7 +789,7 @@ google.golang.org/grpc/serviceconfig
 google.golang.org/grpc/stats
 google.golang.org/grpc/status
 google.golang.org/grpc/tap
-# google.golang.org/protobuf v1.36.1
+# google.golang.org/protobuf v1.36.2
 ## explicit; go 1.21
 google.golang.org/protobuf/encoding/protodelim
 google.golang.org/protobuf/encoding/protojson