[chore] Update all but bun libraries (#526)

* update all but bun libraries

Signed-off-by: kim <grufwub@gmail.com>

* remove my personal build script changes

Signed-off-by: kim <grufwub@gmail.com>
kim <grufwub@gmail.com> authored on 2022-05-02 14:05:18 +01:00, committed by GitHub
commit b56dae8120
350 changed files with 305366 additions and 5943 deletions


@ -79,11 +79,11 @@ would.
So if my application before had:
```go
os.Open('/tmp/foo')
os.Open("/tmp/foo")
```
We would replace it with:
```go
AppFs.Open('/tmp/foo')
AppFs.Open("/tmp/foo")
```
`AppFs` being the variable we defined above.
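For context, a minimal self-contained sketch of the pattern being described: an `afero.Fs` variable that call sites use in place of the `os` package (the exact declaration of `AppFs` appears earlier in the README; this one is illustrative):
```go
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

// AppFs is the filesystem handle the README refers to; swapping the
// constructor (e.g. afero.NewMemMapFs()) is what makes tests hermetic.
var AppFs afero.Fs = afero.NewOsFs()

func main() {
	f, err := AppFs.Open("/tmp/foo")
	if err != nil {
		fmt.Println("open failed:", err)
		return
	}
	defer f.Close()
	fmt.Println("opened", f.Name())
}
```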
@ -259,6 +259,18 @@ system using InMemoryFile.
Afero has experimental support for secure file transfer protocol (sftp), which can
be used to perform file operations over an encrypted channel.
### GCSFs
Afero has experimental support for Google Cloud Storage (GCS). You can either set the
`GOOGLE_APPLICATION_CREDENTIALS_JSON` env variable to your JSON credentials or use `opts` in
`NewGcsFS` to configure access to your GCS bucket.
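A hedged sketch of the `opts` route described above. The `gcsfs` subpackage import path, the `option.WithCredentialsJSON` helper, and the bucket-prefixed object path are assumptions here, not something this diff confirms:
```go
package main

import (
	"context"
	"log"
	"os"

	"github.com/spf13/afero/gcsfs"
	"google.golang.org/api/option"
)

func main() {
	ctx := context.Background()

	// Either rely on GOOGLE_APPLICATION_CREDENTIALS_JSON being set, or pass
	// credentials explicitly through client options as shown here.
	creds, err := os.ReadFile("service-account.json")
	if err != nil {
		log.Fatal(err)
	}

	fs, err := gcsfs.NewGcsFS(ctx, option.WithCredentialsJSON(creds))
	if err != nil {
		log.Fatal(err)
	}

	f, err := fs.Open("my-bucket/some/object.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
}
```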
Some known limitations of the existing implementation:
* No Chmod support - The GCS ACL could probably be mapped to *nix style permissions but that would add another level of complexity and is ignored in this version.
* No Chtimes support - Could be simulated with attributes (gcs a/m-times are set implicitly) but that is left for another version.
* Not thread safe - Also assumes all file operations are done through the same instance of the GcsFs. File operations between different GcsFs instances are not guaranteed to be consistent.
## Filtering Backends
### BasePathFs


@ -75,6 +75,10 @@ func (u *CacheOnReadFs) copyToLayer(name string) error {
return copyToLayer(u.base, u.layer, name)
}
func (u *CacheOnReadFs) copyFileToLayer(name string, flag int, perm os.FileMode) error {
return copyFileToLayer(u.base, u.layer, name, flag, perm)
}
func (u *CacheOnReadFs) Chtimes(name string, atime, mtime time.Time) error {
st, _, err := u.cacheStatus(name)
if err != nil {
@ -212,7 +216,7 @@ func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File,
switch st {
case cacheLocal, cacheHit:
default:
if err := u.copyToLayer(name); err != nil {
if err := u.copyFileToLayer(name, flag, perm); err != nil {
return nil, err
}
}


@ -71,7 +71,7 @@ func CreateFile(name string) *FileData {
}
func CreateDir(name string) *FileData {
return &FileData{name: name, memDir: &DirMap{}, dir: true}
return &FileData{name: name, memDir: &DirMap{}, dir: true, modtime: time.Now()}
}
func ChangeFileName(f *FileData, newname string) {


@ -279,7 +279,7 @@ func (m *MemMapFs) RemoveAll(path string) error {
defer m.mu.RUnlock()
for p := range m.getData() {
if strings.HasPrefix(p, path) {
if p == path || strings.HasPrefix(p, path+FilePathSeparator) {
m.mu.RUnlock()
m.mu.Lock()
delete(m.getData(), p)
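The tightened prefix check above means `RemoveAll("/foo")` no longer deletes unrelated siblings such as `/foobar`. A small sketch of the intended behaviour (paths are illustrative):
```go
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewMemMapFs()
	_ = fs.MkdirAll("/foo/bar", 0o755)
	_ = fs.MkdirAll("/foobar", 0o755)

	_ = fs.RemoveAll("/foo")

	// With the corrected check, only "/foo" and its children are gone;
	// "/foobar" merely shares the string prefix and must survive.
	gone, _ := afero.Exists(fs, "/foo/bar")
	kept, _ := afero.Exists(fs, "/foobar")
	fmt.Println(gone, kept) // expected: false true
}
```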


@ -268,13 +268,7 @@ func (f *UnionFile) WriteString(s string) (n int, err error) {
return 0, BADFD
}
func copyToLayer(base Fs, layer Fs, name string) error {
bfh, err := base.Open(name)
if err != nil {
return err
}
defer bfh.Close()
func copyFile(base Fs, layer Fs, name string, bfh File) error {
// First make sure the directory exists
exists, err := Exists(layer, filepath.Dir(name))
if err != nil {
@ -315,3 +309,23 @@ func copyToLayer(base Fs, layer Fs, name string) error {
}
return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime())
}
func copyToLayer(base Fs, layer Fs, name string) error {
bfh, err := base.Open(name)
if err != nil {
return err
}
defer bfh.Close()
return copyFile(base, layer, name, bfh)
}
func copyFileToLayer(base Fs, layer Fs, name string, flag int, perm os.FileMode) error {
bfh, err := base.OpenFile(name, flag, perm)
if err != nil {
return err
}
defer bfh.Close()
return copyFile(base, layer, name, bfh)
}

vendor/github.com/spf13/cobra/MAINTAINERS generated vendored Normal file

@ -0,0 +1,13 @@
maintainers:
- spf13
- johnSchnake
- jpmcb
- marckhouzam
inactive:
- anthonyfok
- bep
- bogem
- broady
- eparis
- jharshman
- wfernandes


@ -9,11 +9,11 @@ ifeq (, $(shell which richgo))
$(warning "could not find richgo in $(PATH), run: go get github.com/kyoh86/richgo")
endif
.PHONY: fmt lint test cobra_generator install_deps clean
.PHONY: fmt lint test install_deps clean
default: all
all: fmt test cobra_generator
all: fmt test
fmt:
$(info ******************** checking formatting ********************)
@ -23,15 +23,10 @@ lint:
$(info ******************** running lint tools ********************)
golangci-lint run -v
test: install_deps lint
test: install_deps
$(info ******************** running tests ********************)
richgo test -v ./...
cobra_generator: install_deps
$(info ******************** building generator ********************)
mkdir -p $(BIN)
make -C cobra all
install_deps:
$(info ******************** downloading dependencies ********************)
go get -v ./...


@ -1,52 +1,26 @@
![cobra logo](https://cloud.githubusercontent.com/assets/173412/10886352/ad566232-814f-11e5-9cd0-aa101788c117.png)
Cobra is both a library for creating powerful modern CLI applications as well as a program to generate applications and command files.
Cobra is a library for creating powerful modern CLI applications.
Cobra is used in many Go projects such as [Kubernetes](http://kubernetes.io/),
[Hugo](https://gohugo.io), and [Github CLI](https://github.com/cli/cli) to
name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra.
[![](https://img.shields.io/github/workflow/status/spf13/cobra/Test?longCache=tru&label=Test&logo=github%20actions&logoColor=fff)](https://github.com/spf13/cobra/actions?query=workflow%3ATest)
[![GoDoc](https://godoc.org/github.com/spf13/cobra?status.svg)](https://godoc.org/github.com/spf13/cobra)
[![Go Reference](https://pkg.go.dev/badge/github.com/spf13/cobra.svg)](https://pkg.go.dev/github.com/spf13/cobra)
[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cobra)](https://goreportcard.com/report/github.com/spf13/cobra)
[![Slack](https://img.shields.io/badge/Slack-cobra-brightgreen)](https://gophers.slack.com/archives/CD3LP1199)
# Table of Contents
- [Overview](#overview)
- [Concepts](#concepts)
* [Commands](#commands)
* [Flags](#flags)
- [Installing](#installing)
- [Usage](#usage)
* [Using the Cobra Generator](user_guide.md#using-the-cobra-generator)
* [Using the Cobra Library](user_guide.md#using-the-cobra-library)
* [Working with Flags](user_guide.md#working-with-flags)
* [Positional and Custom Arguments](user_guide.md#positional-and-custom-arguments)
* [Example](user_guide.md#example)
* [Help Command](user_guide.md#help-command)
* [Usage Message](user_guide.md#usage-message)
* [PreRun and PostRun Hooks](user_guide.md#prerun-and-postrun-hooks)
* [Suggestions when "unknown command" happens](user_guide.md#suggestions-when-unknown-command-happens)
* [Generating documentation for your command](user_guide.md#generating-documentation-for-your-command)
* [Generating shell completions](user_guide.md#generating-shell-completions)
- [Contributing](CONTRIBUTING.md)
- [License](#license)
# Overview
Cobra is a library providing a simple interface to create powerful modern CLI
interfaces similar to git & go tools.
Cobra is also an application that will generate your application scaffolding to rapidly
develop a Cobra-based application.
Cobra provides:
* Easy subcommand-based CLIs: `app server`, `app fetch`, etc.
* Fully POSIX-compliant flags (including short & long versions)
* Nested subcommands
* Global, local and cascading flags
* Easy generation of applications & commands with `cobra init appname` & `cobra add cmdname`
* Intelligent suggestions (`app srver`... did you mean `app server`?)
* Automatic help generation for commands and flags
* Automatic help flag recognition of `-h`, `--help`, etc.
@ -54,7 +28,7 @@ Cobra provides:
* Automatically generated man pages for your application
* Command aliases so you can change things without breaking them
* The flexibility to define your own help, usage, etc.
* Optional tight integration with [viper](http://github.com/spf13/viper) for 12-factor apps
* Optional seamless integration with [viper](http://github.com/spf13/viper) for 12-factor apps
# Concepts
@ -88,7 +62,7 @@ have children commands and optionally run an action.
In the example above, 'server' is the command.
[More about cobra.Command](https://godoc.org/github.com/spf13/cobra#Command)
[More about cobra.Command](https://pkg.go.dev/github.com/spf13/cobra#Command)
## Flags
@ -105,10 +79,11 @@ which maintains the same interface while adding POSIX compliance.
# Installing
Using Cobra is easy. First, use `go get` to install the latest version
of the library. This command will install the `cobra` generator executable
along with the library and its dependencies:
of the library.
go get -u github.com/spf13/cobra
```
go get -u github.com/spf13/cobra@latest
```
Next, include Cobra in your application:
@ -117,8 +92,19 @@ import "github.com/spf13/cobra"
```
# Usage
`cobra-cli` is a command line program to generate cobra applications and command files.
It will bootstrap your application scaffolding to rapidly
develop a Cobra-based application. It is the easiest way to incorporate Cobra into your application.
See [User Guide](user_guide.md).
It can be installed by running:
```
go install github.com/spf13/cobra-cli@latest
```
For complete details on using the Cobra-CLI generator, please read [The Cobra Generator README](https://github.com/spf13/cobra-cli/blob/master/README.md)
For complete details on using the Cobra library, please read the [The Cobra User Guide](user_guide.md).
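For orientation, a minimal illustrative program that uses only the library (names are arbitrary):
```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{
		Use:   "app",
		Short: "app is a tiny demo CLI",
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Println("hello from app")
		},
	}

	if err := rootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```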
# License


@ -107,3 +107,15 @@ func RangeArgs(min int, max int) PositionalArgs {
return nil
}
}
// MatchAll allows combining several PositionalArgs to work in concert.
func MatchAll(pargs ...PositionalArgs) PositionalArgs {
return func(cmd *Command, args []string) error {
for _, parg := range pargs {
if err := parg(cmd, args); err != nil {
return err
}
}
return nil
}
}
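A short usage sketch for the new `MatchAll` validator; the specific combination shown is illustrative:
```go
package cmd

import "github.com/spf13/cobra"

func newSetCmd() *cobra.Command {
	return &cobra.Command{
		Use:       "set [state]",
		ValidArgs: []string{"on", "off"},
		// Require exactly one argument, and require it to be one of ValidArgs.
		Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs),
		Run: func(cmd *cobra.Command, args []string) {
			// args[0] is guaranteed to be "on" or "off" here.
		},
	}
}
```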


@ -24,7 +24,7 @@ func writePreamble(buf io.StringWriter, name string) {
WriteStringAndCheck(buf, fmt.Sprintf(`
__%[1]s_debug()
{
if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then
if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then
echo "$*" >> "${BASH_COMP_DEBUG_FILE}"
fi
}
@ -134,7 +134,7 @@ __%[1]s_handle_go_custom_completion()
$filteringCmd
elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
# File completion for directories only
local subDir
local subdir
# Use printf to strip any trailing newline
subdir=$(printf "%%s" "${out[0]}")
if [ -n "$subdir" ]; then
@ -187,13 +187,19 @@ __%[1]s_handle_reply()
PREFIX=""
cur="${cur#*=}"
${flags_completion[${index}]}
if [ -n "${ZSH_VERSION}" ]; then
if [ -n "${ZSH_VERSION:-}" ]; then
# zsh completion needs --flag= prefix
eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )"
fi
fi
fi
return 0;
if [[ -z "${flag_parsing_disabled}" ]]; then
# If flag parsing is enabled, we have completed the flags and can return.
# If flag parsing is disabled, we may not know all (or any) of the flags, so we fallthrough
# to possibly call handle_go_custom_completion.
return 0;
fi
;;
esac
@ -232,13 +238,13 @@ __%[1]s_handle_reply()
fi
if [[ ${#COMPREPLY[@]} -eq 0 ]]; then
if declare -F __%[1]s_custom_func >/dev/null; then
# try command name qualified custom func
__%[1]s_custom_func
else
# otherwise fall back to unqualified for compatibility
declare -F __custom_func >/dev/null && __custom_func
fi
if declare -F __%[1]s_custom_func >/dev/null; then
# try command name qualified custom func
__%[1]s_custom_func
else
# otherwise fall back to unqualified for compatibility
declare -F __custom_func >/dev/null && __custom_func
fi
fi
# available in bash-completion >= 2, not always present on macOS
@ -272,7 +278,7 @@ __%[1]s_handle_flag()
# if a command required a flag, and we found it, unset must_have_one_flag()
local flagname=${words[c]}
local flagvalue
local flagvalue=""
# if the word contained an =
if [[ ${words[c]} == *"="* ]]; then
flagvalue=${flagname#*=} # take in as flagvalue after the =
@ -291,7 +297,7 @@ __%[1]s_handle_flag()
# keep flag value with flagname as flaghash
# flaghash variable is an associative array which is only supported in bash > 3.
if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then
if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then
if [ -n "${flagvalue}" ] ; then
flaghash[${flagname}]=${flagvalue}
elif [ -n "${words[ $((c+1)) ]}" ] ; then
@ -303,7 +309,7 @@ __%[1]s_handle_flag()
# skip the argument to a two word flag
if [[ ${words[c]} != *"="* ]] && __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then
__%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument"
__%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument"
c=$((c+1))
# if we are looking for a flags value, don't show commands
if [[ $c -eq $cword ]]; then
@ -363,7 +369,7 @@ __%[1]s_handle_word()
__%[1]s_handle_command
elif __%[1]s_contains_word "${words[c]}" "${command_aliases[@]}"; then
# aliashash variable is an associative array which is only supported in bash > 3.
if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then
if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then
words[c]=${aliashash[${words[c]}]}
__%[1]s_handle_command
else
@ -394,6 +400,7 @@ func writePostscript(buf io.StringWriter, name string) {
fi
local c=0
local flag_parsing_disabled=
local flags=()
local two_word_flags=()
local local_nonpersistent_flags=()
@ -403,8 +410,8 @@ func writePostscript(buf io.StringWriter, name string) {
local command_aliases=()
local must_have_one_flag=()
local must_have_one_noun=()
local has_completion_function
local last_command
local has_completion_function=""
local last_command=""
local nouns=()
local noun_aliases=()
@ -535,6 +542,11 @@ func writeFlags(buf io.StringWriter, cmd *Command) {
flags_completion=()
`)
if cmd.DisableFlagParsing {
WriteStringAndCheck(buf, " flag_parsing_disabled=1\n")
}
localNonPersistentFlags := cmd.LocalNonPersistentFlags()
cmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
if nonCompletableFlag(flag) {
@ -609,7 +621,7 @@ func writeCmdAliases(buf io.StringWriter, cmd *Command) {
sort.Strings(cmd.Aliases)
WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n"))
WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then`, "\n"))
for _, value := range cmd.Aliases {
WriteStringAndCheck(buf, fmt.Sprintf(" command_aliases+=(%q)\n", value))
WriteStringAndCheck(buf, fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name()))


@ -138,13 +138,42 @@ __%[1]s_process_completion_results() {
_filedir -d
fi
else
__%[1]s_handle_standard_completion_case
__%[1]s_handle_completion_types
fi
__%[1]s_handle_special_char "$cur" :
__%[1]s_handle_special_char "$cur" =
}
__%[1]s_handle_completion_types() {
__%[1]s_debug "__%[1]s_handle_completion_types: COMP_TYPE is $COMP_TYPE"
case $COMP_TYPE in
37|42)
# Type: menu-complete/menu-complete-backward and insert-completions
# If the user requested inserting one completion at a time, or all
# completions at once on the command-line we must remove the descriptions.
# https://github.com/spf13/cobra/issues/1508
local tab comp
tab=$(printf '\t')
while IFS='' read -r comp; do
# Strip any description
comp=${comp%%%%$tab*}
# Only consider the completions that match
comp=$(compgen -W "$comp" -- "$cur")
if [ -n "$comp" ]; then
COMPREPLY+=("$comp")
fi
done < <(printf "%%s\n" "${out[@]}")
;;
*)
# Type: complete (normal completion)
__%[1]s_handle_standard_completion_case
;;
esac
}
__%[1]s_handle_standard_completion_case() {
local tab comp
tab=$(printf '\t')


@ -1,3 +1,4 @@
//go:build !windows
// +build !windows
package cobra


@ -1,3 +1,4 @@
//go:build windows
// +build windows
package cobra


@ -93,6 +93,8 @@ type CompletionOptions struct {
// DisableDescriptions turns off all completion descriptions for shells
// that support them
DisableDescriptions bool
// HiddenDefaultCmd makes the default 'completion' command hidden
HiddenDefaultCmd bool
}
// NoFileCompletions can be used to disable file completion for commands that should
@ -226,7 +228,17 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
if c.Root().TraverseChildren {
finalCmd, finalArgs, err = c.Root().Traverse(trimmedArgs)
} else {
finalCmd, finalArgs, err = c.Root().Find(trimmedArgs)
// For Root commands that don't specify any value for their Args fields, when we call
// Find(), if those Root commands don't have any sub-commands, they will accept arguments.
// However, because we have added the __complete sub-command in the current code path, the
// call to Find() -> legacyArgs() will return an error if there are any arguments.
// To avoid this, we first remove the __complete command to get back to having no sub-commands.
rootCmd := c.Root()
if len(rootCmd.Commands()) == 1 {
rootCmd.RemoveCommand(c)
}
finalCmd, finalArgs, err = rootCmd.Find(trimmedArgs)
}
if err != nil {
// Unable to find the real command. E.g., <program> someInvalidCmd <TAB>
@ -266,6 +278,12 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
}
}
// We only remove the flags from the arguments if DisableFlagParsing is not set.
// This is important for commands which have requested to do their own flag completion.
if !finalCmd.DisableFlagParsing {
finalArgs = finalCmd.Flags().Args()
}
if flag != nil && flagCompletion {
// Check if we are completing a flag value subject to annotations
if validExts, present := flag.Annotations[BashCompFilenameExt]; present {
@ -290,12 +308,16 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
}
}
var completions []string
var directive ShellCompDirective
// Note that we want to perform flagname completion even if finalCmd.DisableFlagParsing==true;
// doing this allows for completion of persistent flag names even for commands that disable flag parsing.
//
// When doing completion of a flag name, as soon as an argument starts with
// a '-' we know it is a flag. We cannot use isFlagArg() here as it requires
// the flag name to be complete
if flag == nil && len(toComplete) > 0 && toComplete[0] == '-' && !strings.Contains(toComplete, "=") && flagCompletion {
var completions []string
// First check for required flags
completions = completeRequireFlags(finalCmd, toComplete)
@ -322,86 +344,86 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
})
}
directive := ShellCompDirectiveNoFileComp
directive = ShellCompDirectiveNoFileComp
if len(completions) == 1 && strings.HasSuffix(completions[0], "=") {
// If there is a single completion, the shell usually adds a space
// after the completion. We don't want that if the flag ends with an =
directive = ShellCompDirectiveNoSpace
}
return finalCmd, completions, directive, nil
}
// We only remove the flags from the arguments if DisableFlagParsing is not set.
// This is important for commands which have requested to do their own flag completion.
if !finalCmd.DisableFlagParsing {
finalArgs = finalCmd.Flags().Args()
}
var completions []string
directive := ShellCompDirectiveDefault
if flag == nil {
foundLocalNonPersistentFlag := false
// If TraverseChildren is true on the root command we don't check for
// local flags because we can use a local flag on a parent command
if !finalCmd.Root().TraverseChildren {
// Check if there are any local, non-persistent flags on the command-line
localNonPersistentFlags := finalCmd.LocalNonPersistentFlags()
finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed {
foundLocalNonPersistentFlag = true
}
})
if !finalCmd.DisableFlagParsing {
// If DisableFlagParsing==false, we have completed the flags as known by Cobra;
// we can return what we found.
// If DisableFlagParsing==true, Cobra may not be aware of all flags, so we
// let the logic continue to see if ValidArgsFunction needs to be called.
return finalCmd, completions, directive, nil
}
// Complete subcommand names, including the help command
if len(finalArgs) == 0 && !foundLocalNonPersistentFlag {
// We only complete sub-commands if:
// - there are no arguments on the command-line and
// - there are no local, non-persistent flags on the command-line or TraverseChildren is true
for _, subCmd := range finalCmd.Commands() {
if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand {
if strings.HasPrefix(subCmd.Name(), toComplete) {
completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
} else {
directive = ShellCompDirectiveDefault
if flag == nil {
foundLocalNonPersistentFlag := false
// If TraverseChildren is true on the root command we don't check for
// local flags because we can use a local flag on a parent command
if !finalCmd.Root().TraverseChildren {
// Check if there are any local, non-persistent flags on the command-line
localNonPersistentFlags := finalCmd.LocalNonPersistentFlags()
finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed {
foundLocalNonPersistentFlag = true
}
})
}
// Complete subcommand names, including the help command
if len(finalArgs) == 0 && !foundLocalNonPersistentFlag {
// We only complete sub-commands if:
// - there are no arguments on the command-line and
// - there are no local, non-persistent flags on the command-line or TraverseChildren is true
for _, subCmd := range finalCmd.Commands() {
if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand {
if strings.HasPrefix(subCmd.Name(), toComplete) {
completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
}
directive = ShellCompDirectiveNoFileComp
}
directive = ShellCompDirectiveNoFileComp
}
}
}
// Complete required flags even without the '-' prefix
completions = append(completions, completeRequireFlags(finalCmd, toComplete)...)
// Complete required flags even without the '-' prefix
completions = append(completions, completeRequireFlags(finalCmd, toComplete)...)
// Always complete ValidArgs, even if we are completing a subcommand name.
// This is for commands that have both subcommands and ValidArgs.
if len(finalCmd.ValidArgs) > 0 {
if len(finalArgs) == 0 {
// ValidArgs are only for the first argument
for _, validArg := range finalCmd.ValidArgs {
if strings.HasPrefix(validArg, toComplete) {
completions = append(completions, validArg)
// Always complete ValidArgs, even if we are completing a subcommand name.
// This is for commands that have both subcommands and ValidArgs.
if len(finalCmd.ValidArgs) > 0 {
if len(finalArgs) == 0 {
// ValidArgs are only for the first argument
for _, validArg := range finalCmd.ValidArgs {
if strings.HasPrefix(validArg, toComplete) {
completions = append(completions, validArg)
}
}
}
directive = ShellCompDirectiveNoFileComp
directive = ShellCompDirectiveNoFileComp
// If no completions were found within commands or ValidArgs,
// see if there are any ArgAliases that should be completed.
if len(completions) == 0 {
for _, argAlias := range finalCmd.ArgAliases {
if strings.HasPrefix(argAlias, toComplete) {
completions = append(completions, argAlias)
// If no completions were found within commands or ValidArgs,
// see if there are any ArgAliases that should be completed.
if len(completions) == 0 {
for _, argAlias := range finalCmd.ArgAliases {
if strings.HasPrefix(argAlias, toComplete) {
completions = append(completions, argAlias)
}
}
}
}
// If there are ValidArgs specified (even if they don't match), we stop completion.
// Only one of ValidArgs or ValidArgsFunction can be used for a single command.
return finalCmd, completions, directive, nil
}
// If there are ValidArgs specified (even if they don't match), we stop completion.
// Only one of ValidArgs or ValidArgsFunction can be used for a single command.
return finalCmd, completions, directive, nil
// Let the logic continue so as to add any ValidArgsFunction completions,
// even if we already found sub-commands.
// This is for commands that have subcommands but also specify a ValidArgsFunction.
}
// Let the logic continue so as to add any ValidArgsFunction completions,
// even if we already found sub-commands.
// This is for commands that have subcommands but also specify a ValidArgsFunction.
}
// Find the completion function for the flag or command
@ -589,39 +611,43 @@ func (c *Command) initDefaultCompletionCmd() {
completionCmd := &Command{
Use: compCmdName,
Short: "generate the autocompletion script for the specified shell",
Long: fmt.Sprintf(`
Generate the autocompletion script for %[1]s for the specified shell.
Short: "Generate the autocompletion script for the specified shell",
Long: fmt.Sprintf(`Generate the autocompletion script for %[1]s for the specified shell.
See each sub-command's help for details on how to use the generated script.
`, c.Root().Name()),
Args: NoArgs,
ValidArgsFunction: NoFileCompletions,
Hidden: c.CompletionOptions.HiddenDefaultCmd,
}
c.AddCommand(completionCmd)
out := c.OutOrStdout()
noDesc := c.CompletionOptions.DisableDescriptions
shortDesc := "generate the autocompletion script for %s"
shortDesc := "Generate the autocompletion script for %s"
bash := &Command{
Use: "bash",
Short: fmt.Sprintf(shortDesc, "bash"),
Long: fmt.Sprintf(`
Generate the autocompletion script for the bash shell.
Long: fmt.Sprintf(`Generate the autocompletion script for the bash shell.
This script depends on the 'bash-completion' package.
If it is not installed already, you can install it via your OS's package manager.
To load completions in your current shell session:
$ source <(%[1]s completion bash)
source <(%[1]s completion bash)
To load completions for every new session, execute once:
Linux:
$ %[1]s completion bash > /etc/bash_completion.d/%[1]s
MacOS:
$ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s
#### Linux:
%[1]s completion bash > /etc/bash_completion.d/%[1]s
#### macOS:
%[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s
You will need to start a new shell for this setup to take effect.
`, c.Root().Name()),
`, c.Root().Name()),
Args: NoArgs,
DisableFlagsInUseLine: true,
ValidArgsFunction: NoFileCompletions,
@ -636,19 +662,22 @@ You will need to start a new shell for this setup to take effect.
zsh := &Command{
Use: "zsh",
Short: fmt.Sprintf(shortDesc, "zsh"),
Long: fmt.Sprintf(`
Generate the autocompletion script for the zsh shell.
Long: fmt.Sprintf(`Generate the autocompletion script for the zsh shell.
If shell completion is not already enabled in your environment you will need
to enable it. You can execute the following once:
$ echo "autoload -U compinit; compinit" >> ~/.zshrc
echo "autoload -U compinit; compinit" >> ~/.zshrc
To load completions for every new session, execute once:
# Linux:
$ %[1]s completion zsh > "${fpath[1]}/_%[1]s"
# macOS:
$ %[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s
#### Linux:
%[1]s completion zsh > "${fpath[1]}/_%[1]s"
#### macOS:
%[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s
You will need to start a new shell for this setup to take effect.
`, c.Root().Name()),
@ -668,14 +697,15 @@ You will need to start a new shell for this setup to take effect.
fish := &Command{
Use: "fish",
Short: fmt.Sprintf(shortDesc, "fish"),
Long: fmt.Sprintf(`
Generate the autocompletion script for the fish shell.
Long: fmt.Sprintf(`Generate the autocompletion script for the fish shell.
To load completions in your current shell session:
$ %[1]s completion fish | source
%[1]s completion fish | source
To load completions for every new session, execute once:
$ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish
%[1]s completion fish > ~/.config/fish/completions/%[1]s.fish
You will need to start a new shell for this setup to take effect.
`, c.Root().Name()),
@ -692,11 +722,11 @@ You will need to start a new shell for this setup to take effect.
powershell := &Command{
Use: "powershell",
Short: fmt.Sprintf(shortDesc, "powershell"),
Long: fmt.Sprintf(`
Generate the autocompletion script for powershell.
Long: fmt.Sprintf(`Generate the autocompletion script for powershell.
To load completions in your current shell session:
PS C:\> %[1]s completion powershell | Out-String | Invoke-Expression
%[1]s completion powershell | Out-String | Invoke-Expression
To load completions for every new session, add the output of the above command
to your powershell profile.


@ -50,7 +50,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock {
if ($Command.Length -gt $CursorPosition) {
$Command=$Command.Substring(0,$CursorPosition)
}
__%[1]s_debug "Truncated command: $Command"
__%[1]s_debug "Truncated command: $Command"
$ShellCompDirectiveError=%[3]d
$ShellCompDirectiveNoSpace=%[4]d
@ -58,7 +58,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock {
$ShellCompDirectiveFilterFileExt=%[6]d
$ShellCompDirectiveFilterDirs=%[7]d
# Prepare the command to request completions for the program.
# Prepare the command to request completions for the program.
# Split the command at the first space to separate the program and arguments.
$Program,$Arguments = $Command.Split(" ",2)
$RequestComp="$Program %[2]s $Arguments"
@ -233,7 +233,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock {
Default {
# Like MenuComplete but we don't want to add a space here because
# the user need to press space anyway to get the completion.
# Description will not be shown because thats not possible with TabCompleteNext
# Description will not be shown because that's not possible with TabCompleteNext
[System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars), "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
}
}


@ -4,6 +4,7 @@
- [Bleve](http://www.blevesearch.com/)
- [CockroachDB](http://www.cockroachlabs.com/)
- [Cosmos SDK](https://github.com/cosmos/cosmos-sdk)
- [Datree](https://github.com/datreeio/datree)
- [Delve](https://github.com/derekparker/delve)
- [Docker (distribution)](https://github.com/docker/distribution)
- [Etcd](https://etcd.io/)
@ -14,25 +15,37 @@
- [GitHub Labeler](https://github.com/erdaltsksn/gh-label)
- [Golangci-lint](https://golangci-lint.run)
- [GopherJS](http://www.gopherjs.org/)
- [GoReleaser](https://goreleaser.com)
- [Helm](https://helm.sh)
- [Hugo](https://gohugo.io)
- [Infracost](https://github.com/infracost/infracost)
- [Istio](https://istio.io)
- [Kool](https://github.com/kool-dev/kool)
- [Kubernetes](http://kubernetes.io/)
- [Linkerd](https://linkerd.io/)
- [Mattermost-server](https://github.com/mattermost/mattermost-server)
- [Mercure](https://mercure.rocks/)
- [Meroxa CLI](https://github.com/meroxa/cli)
- [Metal Stack CLI](https://github.com/metal-stack/metalctl)
- [Moby (former Docker)](https://github.com/moby/moby)
- [Moldy](https://github.com/Moldy-Community/moldy)
- [Multi-gitter](https://github.com/lindell/multi-gitter)
- [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack)
- [nFPM](https://nfpm.goreleaser.com)
- [OpenShift](https://www.openshift.com/)
- [Ory Hydra](https://github.com/ory/hydra)
- [Ory Kratos](https://github.com/ory/kratos)
- [Pixie](https://github.com/pixie-io/pixie)
- [Pouch](https://github.com/alibaba/pouch)
- [ProjectAtomic (enterprise)](http://www.projectatomic.io/)
- [Prototool](https://github.com/uber/prototool)
- [QRcp](https://github.com/claudiodangelis/qrcp)
- [Random](https://github.com/erdaltsksn/random)
- [Rclone](https://rclone.org/)
- [Scaleway CLI](https://github.com/scaleway/scaleway-cli)
- [Skaffold](https://skaffold.dev/)
- [Tendermint](https://github.com/tendermint/tendermint)
- [Twitch CLI](https://github.com/twitchdev/twitch-cli)
- [UpCloud CLI (`upctl`)](https://github.com/UpCloudLtd/upcloud-cli)
- VMware's [Tanzu Community Edition](https://github.com/vmware-tanzu/community-edition) & [Tanzu Framework](https://github.com/vmware-tanzu/tanzu-framework)
- [Werf](https://werf.io/)


@ -16,10 +16,12 @@ If you do not wish to use the default `completion` command, you can choose to
provide your own, which will take precedence over the default one. (This also provides
backwards-compatibility with programs that already have their own `completion` command.)
If you are using the generator, you can create a completion command by running
If you are using the `cobra-cli` generator,
which can be found at [spf13/cobra-cli](https://github.com/spf13/cobra-cli),
you can create a completion command by running
```bash
cobra add completion
cobra-cli add completion
```
and then modifying the generated `cmd/completion.go` file to look something like this
(writing the shell script to stdout allows the most flexible use):
@ -28,17 +30,17 @@ and then modifying the generated `cmd/completion.go` file to look something like
var completionCmd = &cobra.Command{
Use: "completion [bash|zsh|fish|powershell]",
Short: "Generate completion script",
Long: `To load completions:
Long: fmt.Sprintf(`To load completions:
Bash:
$ source <(yourprogram completion bash)
$ source <(%[1]s completion bash)
# To load completions for each session, execute once:
# Linux:
$ yourprogram completion bash > /etc/bash_completion.d/yourprogram
$ %[1]s completion bash > /etc/bash_completion.d/%[1]s
# macOS:
$ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram
$ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s
Zsh:
@ -48,25 +50,25 @@ Zsh:
$ echo "autoload -U compinit; compinit" >> ~/.zshrc
# To load completions for each session, execute once:
$ yourprogram completion zsh > "${fpath[1]}/_yourprogram"
$ %[1]s completion zsh > "${fpath[1]}/_%[1]s"
# You will need to start a new shell for this setup to take effect.
fish:
$ yourprogram completion fish | source
$ %[1]s completion fish | source
# To load completions for each session, execute once:
$ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish
$ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish
PowerShell:
PS> yourprogram completion powershell | Out-String | Invoke-Expression
PS> %[1]s completion powershell | Out-String | Invoke-Expression
# To load completions for every new session, run:
PS> yourprogram completion powershell > yourprogram.ps1
PS> %[1]s completion powershell > %[1]s.ps1
# and source this file from your PowerShell profile.
`,
`,cmd.Root().Name()),
DisableFlagsInUseLine: true,
ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
Args: cobra.ExactValidArgs(1),


@ -29,10 +29,10 @@ func main() {
## Using the Cobra Generator
Cobra provides its own program that will create your application and add any
Cobra-CLI is its own program that will create your application and add any
commands you want. It's the easiest way to incorporate Cobra into your application.
[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it.
For complete details on using the Cobra generator, please refer to [The Cobra-CLI Generator README](https://github.com/spf13/cobra-cli/blob/master/README.md)
## Using the Cobra Library
@ -86,7 +86,7 @@ var (
userLicense string
rootCmd = &cobra.Command{
Use: "cobra",
Use: "cobra-cli",
Short: "A generator for Cobra based Applications",
Long: `Cobra is a CLI library for Go that empowers applications.
This application is a tool to generate the needed files
@ -281,7 +281,7 @@ func init() {
In this example, the persistent flag `author` is bound with `viper`.
**Note**: the variable `author` will not be set to the value from config,
when the `--author` flag is not provided by user.
when the `--author` flag is provided by user.
More in [viper documentation](https://github.com/spf13/viper#working-with-flags).
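For reference, a minimal sketch of the kind of binding the note above refers to (variable names are illustrative):
```go
package cmd

import (
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

var (
	author  string
	rootCmd = &cobra.Command{Use: "app"}
)

func init() {
	rootCmd.PersistentFlags().StringVar(&author, "author", "", "author name for copyright attribution")

	// Bind the flag to a viper key: viper.GetString("author") then returns the
	// flag value when --author is passed, and the config value otherwise.
	if err := viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")); err != nil {
		panic(err)
	}
}
```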
@ -315,6 +315,7 @@ The following validators are built in:
- `ExactArgs(int)` - the command will report an error if there are not exactly N positional args.
- `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command`
- `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args.
- `MatchAll(pargs ...PositionalArgs)` - enables combining existing checks with arbitrary other checks (e.g. you want to check the ExactArgs length along with other qualities).
An example of setting the custom validator:
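As an illustrative sketch (not the guide's own example), a custom validator is simply a function assigned to `Args`; the color check here is hypothetical:
```go
package cmd

import (
	"errors"
	"fmt"

	"github.com/spf13/cobra"
)

var helloCmd = &cobra.Command{
	Use:   "hello [color]",
	Short: "prints a greeting in a color",
	// A custom validator: any func(*cobra.Command, []string) error works.
	Args: func(cmd *cobra.Command, args []string) error {
		if len(args) < 1 {
			return errors.New("requires a color argument")
		}
		if args[0] != "red" && args[0] != "blue" {
			return fmt.Errorf("invalid color specified: %s", args[0])
		}
		return nil
	},
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Println("Hello,", args[0])
	},
}
```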


@ -202,7 +202,7 @@ _%[1]s()
_arguments '*:filename:'"$filteringCmd"
elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
# File completion for directories only
local subDir
local subdir
subdir="${completions[1]}"
if [ -n "$subdir" ]; then
__%[1]s_debug "Listing directories in $subdir"
@ -250,7 +250,7 @@ _%[1]s()
# don't run the completion function when being source-ed or eval-ed
if [ "$funcstack[1]" = "_%[1]s" ]; then
_%[1]s
_%[1]s
fi
`, name, compCmd,
ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,


@ -3,7 +3,10 @@ run:
linters-settings:
gci:
local-prefixes: github.com/spf13/viper
sections:
- standard
- default
- prefix(github.com/spf13/viper)
golint:
min-confidence: 0
goimports:


@ -15,8 +15,8 @@ TEST_FORMAT = short-verbose
endif
# Dependency versions
GOTESTSUM_VERSION = 1.7.0
GOLANGCI_VERSION = 1.43.0
GOTESTSUM_VERSION = 1.8.0
GOLANGCI_VERSION = 1.45.2
# Add the ability to override some variables
# Use with care


@ -11,7 +11,7 @@
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/spf13/viper/CI?style=flat-square)](https://github.com/spf13/viper/actions?query=workflow%3ACI)
[![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/viper?style=flat-square)](https://goreportcard.com/report/github.com/spf13/viper)
![Go Version](https://img.shields.io/badge/go%20version-%3E=1.14-61CFDD.svg?style=flat-square)
![Go Version](https://img.shields.io/badge/go%20version-%3E=1.15-61CFDD.svg?style=flat-square)
[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/viper)](https://pkg.go.dev/mod/github.com/spf13/viper)
**Go configuration with fangs!**


@ -21,3 +21,12 @@ The solution is easy: switch to using Go Modules.
Please refer to the [wiki](https://github.com/golang/go/wiki/Modules) on how to do that.
**tl;dr:** `export GO111MODULE=on`
## Unquoted 'y' and 'n' characters get replaced with _true_ and _false_ when reading a YAML file
This is a YAML 1.1 feature according to [go-yaml/yaml#740](https://github.com/go-yaml/yaml/issues/740).
Potential solutions are:
1. Quoting values resolved as boolean
1. Upgrading to YAML v3 (for the time being this is possible by passing the `viper_yaml3` tag to your build)
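A small sketch reproducing the behaviour described above (the keys and values are made up):
```go
package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// With the default YAML 1.1 parser, the unquoted y is read as the boolean
	// true, while the quoted "n" stays the literal string "n".
	cfg := []byte("enabled: y\nanswer: \"n\"\n")

	v := viper.New()
	v.SetConfigType("yaml")
	if err := v.ReadConfig(bytes.NewBuffer(cfg)); err != nil {
		panic(err)
	}

	fmt.Printf("%v (%T)\n", v.Get("enabled"), v.Get("enabled")) // true (bool)
	fmt.Printf("%v (%T)\n", v.Get("answer"), v.Get("answer"))   // n (string)
}
```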

vendor/github.com/spf13/viper/experimental_logger.go generated vendored Normal file

@ -0,0 +1,11 @@
//go:build viper_logger
// +build viper_logger
package viper
// WithLogger sets a custom logger.
func WithLogger(l Logger) Option {
return optionFunc(func(v *Viper) {
v.logger = l
})
}
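A hedged usage sketch, assuming a binary built with `-tags viper_logger` and some value `appLogger` that satisfies viper's `Logger` interface:
```go
//go:build viper_logger

package main

import "github.com/spf13/viper"

// appLogger is assumed to be defined elsewhere and to implement viper.Logger.
var appLogger viper.Logger

func newViper() *viper.Viper {
	// WithLogger is only compiled in when the viper_logger build tag is set.
	return viper.NewWithOptions(viper.WithLogger(appLogger))
}
```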


@ -4,10 +4,10 @@ import (
"sync"
)
// Decoder decodes the contents of b into a v representation.
// Decoder decodes the contents of b into v.
// It's primarily used for decoding contents of a file into a map[string]interface{}.
type Decoder interface {
Decode(b []byte, v interface{}) error
Decode(b []byte, v map[string]interface{}) error
}
const (
@ -48,7 +48,7 @@ func (e *DecoderRegistry) RegisterDecoder(format string, enc Decoder) error {
}
// Decode calls the underlying Decoder based on the format.
func (e *DecoderRegistry) Decode(format string, b []byte, v interface{}) error {
func (e *DecoderRegistry) Decode(format string, b []byte, v map[string]interface{}) error {
e.mu.RLock()
decoder, ok := e.decoders[format]
e.mu.RUnlock()


@ -0,0 +1,61 @@
package dotenv
import (
"bytes"
"fmt"
"sort"
"strings"
"github.com/subosito/gotenv"
)
const keyDelimiter = "_"
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for encoding data containing environment variables
// (commonly called the dotenv format).
type Codec struct{}
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
flattened := map[string]interface{}{}
flattened = flattenAndMergeMap(flattened, v, "", keyDelimiter)
keys := make([]string, 0, len(flattened))
for key := range flattened {
keys = append(keys, key)
}
sort.Strings(keys)
var buf bytes.Buffer
for _, key := range keys {
_, err := buf.WriteString(fmt.Sprintf("%v=%v\n", strings.ToUpper(key), flattened[key]))
if err != nil {
return nil, err
}
}
return buf.Bytes(), nil
}
func (Codec) Decode(b []byte, v map[string]interface{}) error {
var buf bytes.Buffer
_, err := buf.Write(b)
if err != nil {
return err
}
env, err := gotenv.StrictParse(&buf)
if err != nil {
return err
}
for key, value := range env {
v[key] = value
}
return nil
}


@ -0,0 +1,41 @@
package dotenv
import (
"strings"
"github.com/spf13/cast"
)
// flattenAndMergeMap recursively flattens the given map into a new map
// Code is based on the function with the same name in the main package.
// TODO: move it to a common place
func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} {
if shadow != nil && prefix != "" && shadow[prefix] != nil {
// prefix is shadowed => nothing more to flatten
return shadow
}
if shadow == nil {
shadow = make(map[string]interface{})
}
var m2 map[string]interface{}
if prefix != "" {
prefix += delimiter
}
for k, val := range m {
fullKey := prefix + k
switch val.(type) {
case map[string]interface{}:
m2 = val.(map[string]interface{})
case map[interface{}]interface{}:
m2 = cast.ToStringMap(val)
default:
// immediate value
shadow[strings.ToLower(fullKey)] = val
continue
}
// recursively merge to shadow map
shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter)
}
return shadow
}
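An illustrative call (hypothetical input) showing what the helper produces for the dotenv codec's `_` delimiter:
```go
package dotenv

import "fmt"

// illustrateFlatten is a sketch, not part of the real package: it shows how
// nested keys collapse with the "_" delimiter before Encode sorts and writes them.
func illustrateFlatten() {
	nested := map[string]interface{}{
		"database": map[string]interface{}{
			"host": "localhost",
			"port": 5432,
		},
	}

	flat := flattenAndMergeMap(map[string]interface{}{}, nested, "", keyDelimiter)
	fmt.Println(flat) // map[database_host:localhost database_port:5432]
}
```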


@ -7,7 +7,7 @@ import (
// Encoder encodes the contents of v into a byte representation.
// It's primarily used for encoding a map[string]interface{} into a file format.
type Encoder interface {
Encode(v interface{}) ([]byte, error)
Encode(v map[string]interface{}) ([]byte, error)
}
const (
@ -47,7 +47,7 @@ func (e *EncoderRegistry) RegisterEncoder(format string, enc Encoder) error {
return nil
}
func (e *EncoderRegistry) Encode(format string, v interface{}) ([]byte, error) {
func (e *EncoderRegistry) Encode(format string, v map[string]interface{}) ([]byte, error) {
e.mu.RLock()
encoder, ok := e.encoders[format]
e.mu.RUnlock()


@ -12,7 +12,7 @@ import (
// TODO: add printer config to the codec?
type Codec struct{}
func (Codec) Encode(v interface{}) ([]byte, error) {
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
b, err := json.Marshal(v)
if err != nil {
return nil, err
@ -35,6 +35,6 @@ func (Codec) Encode(v interface{}) ([]byte, error) {
return buf.Bytes(), nil
}
func (Codec) Decode(b []byte, v interface{}) error {
return hcl.Unmarshal(b, v)
func (Codec) Decode(b []byte, v map[string]interface{}) error {
return hcl.Unmarshal(b, &v)
}


@ -0,0 +1,99 @@
package ini
import (
"bytes"
"sort"
"strings"
"github.com/spf13/cast"
"gopkg.in/ini.v1"
)
// LoadOptions contains all customized options used for loading the data source(s).
// This type is added here for convenience: this way consumers can import a single package called "ini".
type LoadOptions = ini.LoadOptions
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for INI encoding.
type Codec struct {
KeyDelimiter string
LoadOptions LoadOptions
}
func (c Codec) Encode(v map[string]interface{}) ([]byte, error) {
cfg := ini.Empty()
ini.PrettyFormat = false
flattened := map[string]interface{}{}
flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter())
keys := make([]string, 0, len(flattened))
for key := range flattened {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
sectionName, keyName := "", key
lastSep := strings.LastIndex(key, ".")
if lastSep != -1 {
sectionName = key[:(lastSep)]
keyName = key[(lastSep + 1):]
}
// TODO: is this a good idea?
if sectionName == "default" {
sectionName = ""
}
cfg.Section(sectionName).Key(keyName).SetValue(cast.ToString(flattened[key]))
}
var buf bytes.Buffer
_, err := cfg.WriteTo(&buf)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
func (c Codec) Decode(b []byte, v map[string]interface{}) error {
cfg := ini.Empty(c.LoadOptions)
err := cfg.Append(b)
if err != nil {
return err
}
sections := cfg.Sections()
for i := 0; i < len(sections); i++ {
section := sections[i]
keys := section.Keys()
for j := 0; j < len(keys); j++ {
key := keys[j]
value := cfg.Section(section.Name()).Key(key.Name()).String()
deepestMap := deepSearch(v, strings.Split(section.Name(), c.keyDelimiter()))
// set innermost value
deepestMap[key.Name()] = value
}
}
return nil
}
func (c Codec) keyDelimiter() string {
if c.KeyDelimiter == "" {
return "."
}
return c.KeyDelimiter
}


@ -0,0 +1,74 @@
package ini
import (
"strings"
"github.com/spf13/cast"
)
// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED
// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE
// deepSearch scans deep maps, following the key indexes listed in the
// sequence "path".
// The last value is expected to be another map, and is returned.
//
// In case intermediate keys do not exist, or map to a non-map value,
// a new map is created and inserted, and the search continues from there:
// the initial map "m" may be modified!
func deepSearch(m map[string]interface{}, path []string) map[string]interface{} {
for _, k := range path {
m2, ok := m[k]
if !ok {
// intermediate key does not exist
// => create it and continue from there
m3 := make(map[string]interface{})
m[k] = m3
m = m3
continue
}
m3, ok := m2.(map[string]interface{})
if !ok {
// intermediate key is a value
// => replace with a new map
m3 = make(map[string]interface{})
m[k] = m3
}
// continue search from here
m = m3
}
return m
}
// flattenAndMergeMap recursively flattens the given map into a new map
// Code is based on the function with the same name in the main package.
// TODO: move it to a common place
func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} {
if shadow != nil && prefix != "" && shadow[prefix] != nil {
// prefix is shadowed => nothing more to flatten
return shadow
}
if shadow == nil {
shadow = make(map[string]interface{})
}
var m2 map[string]interface{}
if prefix != "" {
prefix += delimiter
}
for k, val := range m {
fullKey := prefix + k
switch val.(type) {
case map[string]interface{}:
m2 = val.(map[string]interface{})
case map[interface{}]interface{}:
m2 = cast.ToStringMap(val)
default:
// immediate value
shadow[strings.ToLower(fullKey)] = val
continue
}
// recursively merge to shadow map
shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter)
}
return shadow
}


@ -0,0 +1,86 @@
package javaproperties
import (
"bytes"
"sort"
"strings"
"github.com/magiconair/properties"
"github.com/spf13/cast"
)
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for Java properties encoding.
type Codec struct {
KeyDelimiter string
// Store read properties on the object so that we can write back in order with comments.
// This will only be used if the configuration read is a properties file.
// TODO: drop this feature in v2
// TODO: make use of the global properties object optional
Properties *properties.Properties
}
func (c *Codec) Encode(v map[string]interface{}) ([]byte, error) {
if c.Properties == nil {
c.Properties = properties.NewProperties()
}
flattened := map[string]interface{}{}
flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter())
keys := make([]string, 0, len(flattened))
for key := range flattened {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
_, _, err := c.Properties.Set(key, cast.ToString(flattened[key]))
if err != nil {
return nil, err
}
}
var buf bytes.Buffer
_, err := c.Properties.WriteComment(&buf, "#", properties.UTF8)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
func (c *Codec) Decode(b []byte, v map[string]interface{}) error {
var err error
c.Properties, err = properties.Load(b, properties.UTF8)
if err != nil {
return err
}
for _, key := range c.Properties.Keys() {
// ignore existence check: we know it's there
value, _ := c.Properties.Get(key)
// recursively build nested maps
path := strings.Split(key, c.keyDelimiter())
lastKey := strings.ToLower(path[len(path)-1])
deepestMap := deepSearch(v, path[0:len(path)-1])
// set innermost value
deepestMap[lastKey] = value
}
return nil
}
func (c Codec) keyDelimiter() string {
if c.KeyDelimiter == "" {
return "."
}
return c.KeyDelimiter
}


@ -0,0 +1,74 @@
package javaproperties
import (
"strings"
"github.com/spf13/cast"
)
// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED
// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE
// deepSearch scans deep maps, following the key indexes listed in the
// sequence "path".
// The last value is expected to be another map, and is returned.
//
// In case intermediate keys do not exist, or map to a non-map value,
// a new map is created and inserted, and the search continues from there:
// the initial map "m" may be modified!
func deepSearch(m map[string]interface{}, path []string) map[string]interface{} {
for _, k := range path {
m2, ok := m[k]
if !ok {
// intermediate key does not exist
// => create it and continue from there
m3 := make(map[string]interface{})
m[k] = m3
m = m3
continue
}
m3, ok := m2.(map[string]interface{})
if !ok {
// intermediate key is a value
// => replace with a new map
m3 = make(map[string]interface{})
m[k] = m3
}
// continue search from here
m = m3
}
return m
}
// flattenAndMergeMap recursively flattens the given map into a new map
// Code is based on the function with the same name in the main package.
// TODO: move it to a common place
func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} {
if shadow != nil && prefix != "" && shadow[prefix] != nil {
// prefix is shadowed => nothing more to flatten
return shadow
}
if shadow == nil {
shadow = make(map[string]interface{})
}
var m2 map[string]interface{}
if prefix != "" {
prefix += delimiter
}
for k, val := range m {
fullKey := prefix + k
switch val.(type) {
case map[string]interface{}:
m2 = val.(map[string]interface{})
case map[interface{}]interface{}:
m2 = cast.ToStringMap(val)
default:
// immediate value
shadow[strings.ToLower(fullKey)] = val
continue
}
// recursively merge to shadow map
shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter)
}
return shadow
}


@ -7,11 +7,11 @@ import (
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for JSON encoding.
type Codec struct{}
func (Codec) Encode(v interface{}) ([]byte, error) {
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
// TODO: expose prefix and indent in the Codec as setting?
return json.MarshalIndent(v, "", " ")
}
func (Codec) Decode(b []byte, v interface{}) error {
return json.Unmarshal(b, v)
func (Codec) Decode(b []byte, v map[string]interface{}) error {
return json.Unmarshal(b, &v)
}


@ -1,3 +1,6 @@
//go:build !viper_toml2
// +build !viper_toml2
package toml
import (
@ -7,39 +10,30 @@ import (
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for TOML encoding.
type Codec struct{}
func (Codec) Encode(v interface{}) ([]byte, error) {
if m, ok := v.(map[string]interface{}); ok {
t, err := toml.TreeFromMap(m)
if err != nil {
return nil, err
}
s, err := t.ToTomlString()
if err != nil {
return nil, err
}
return []byte(s), nil
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
t, err := toml.TreeFromMap(v)
if err != nil {
return nil, err
}
return toml.Marshal(v)
s, err := t.ToTomlString()
if err != nil {
return nil, err
}
return []byte(s), nil
}
func (Codec) Decode(b []byte, v interface{}) error {
func (Codec) Decode(b []byte, v map[string]interface{}) error {
tree, err := toml.LoadBytes(b)
if err != nil {
return err
}
if m, ok := v.(*map[string]interface{}); ok {
vmap := *m
tmap := tree.ToMap()
for k, v := range tmap {
vmap[k] = v
}
return nil
tmap := tree.ToMap()
for key, value := range tmap {
v[key] = value
}
return tree.Unmarshal(v)
return nil
}


@ -0,0 +1,19 @@
//go:build viper_toml2
// +build viper_toml2
package toml
import (
"github.com/pelletier/go-toml/v2"
)
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for TOML encoding.
type Codec struct{}
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
return toml.Marshal(v)
}
func (Codec) Decode(b []byte, v map[string]interface{}) error {
return toml.Unmarshal(b, &v)
}


@ -1,14 +1,14 @@
package yaml
import "gopkg.in/yaml.v2"
// import "gopkg.in/yaml.v2"
// Codec implements the encoding.Encoder and encoding.Decoder interfaces for YAML encoding.
type Codec struct{}
func (Codec) Encode(v interface{}) ([]byte, error) {
func (Codec) Encode(v map[string]interface{}) ([]byte, error) {
return yaml.Marshal(v)
}
func (Codec) Decode(b []byte, v interface{}) error {
return yaml.Unmarshal(b, v)
func (Codec) Decode(b []byte, v map[string]interface{}) error {
return yaml.Unmarshal(b, &v)
}


@ -0,0 +1,14 @@
//go:build !viper_yaml3
// +build !viper_yaml3
package yaml
import yamlv2 "gopkg.in/yaml.v2"
var yaml = struct {
Marshal func(in interface{}) (out []byte, err error)
Unmarshal func(in []byte, out interface{}) (err error)
}{
Marshal: yamlv2.Marshal,
Unmarshal: yamlv2.Unmarshal,
}


@ -0,0 +1,14 @@
//go:build viper_yaml3
// +build viper_yaml3
package yaml
import yamlv3 "gopkg.in/yaml.v3"
var yaml = struct {
Marshal func(in interface{}) (out []byte, err error)
Unmarshal func(in []byte, out interface{}) (err error)
}{
Marshal: yamlv3.Marshal,
Unmarshal: yamlv3.Unmarshal,
}


@ -35,16 +35,16 @@ import (
"time"
"github.com/fsnotify/fsnotify"
"github.com/magiconair/properties"
"github.com/mitchellh/mapstructure"
"github.com/spf13/afero"
"github.com/spf13/cast"
"github.com/spf13/pflag"
"github.com/subosito/gotenv"
"gopkg.in/ini.v1"
"github.com/spf13/viper/internal/encoding"
"github.com/spf13/viper/internal/encoding/dotenv"
"github.com/spf13/viper/internal/encoding/hcl"
"github.com/spf13/viper/internal/encoding/ini"
"github.com/spf13/viper/internal/encoding/javaproperties"
"github.com/spf13/viper/internal/encoding/json"
"github.com/spf13/viper/internal/encoding/toml"
"github.com/spf13/viper/internal/encoding/yaml"
@ -67,47 +67,8 @@ type RemoteResponse struct {
Error error
}
var (
encoderRegistry = encoding.NewEncoderRegistry()
decoderRegistry = encoding.NewDecoderRegistry()
)
func init() {
v = New()
{
codec := yaml.Codec{}
encoderRegistry.RegisterEncoder("yaml", codec)
decoderRegistry.RegisterDecoder("yaml", codec)
encoderRegistry.RegisterEncoder("yml", codec)
decoderRegistry.RegisterDecoder("yml", codec)
}
{
codec := json.Codec{}
encoderRegistry.RegisterEncoder("json", codec)
decoderRegistry.RegisterDecoder("json", codec)
}
{
codec := toml.Codec{}
encoderRegistry.RegisterEncoder("toml", codec)
decoderRegistry.RegisterDecoder("toml", codec)
}
{
codec := hcl.Codec{}
encoderRegistry.RegisterEncoder("hcl", codec)
decoderRegistry.RegisterDecoder("hcl", codec)
encoderRegistry.RegisterEncoder("tfvars", codec)
decoderRegistry.RegisterDecoder("tfvars", codec)
}
}
type remoteConfigFactory interface {
@ -254,13 +215,13 @@ type Viper struct {
aliases map[string]string
typeByDefValue bool
// Store read properties on the object so that we can write back in order with comments.
// This will only be used if the configuration read is a properties file.
properties *properties.Properties
onConfigChange func(fsnotify.Event)
logger Logger
// TODO: should probably be protected with a mutex
encoderRegistry *encoding.EncoderRegistry
decoderRegistry *encoding.DecoderRegistry
}
// New returns an initialized Viper instance.
@ -280,6 +241,8 @@ func New() *Viper {
v.typeByDefValue = false
v.logger = jwwLogger{}
v.resetEncoding()
return v
}
@ -326,6 +289,8 @@ func NewWithOptions(opts ...Option) *Viper {
opt.apply(v)
}
v.resetEncoding()
return v
}
@ -338,6 +303,84 @@ func Reset() {
SupportedRemoteProviders = []string{"etcd", "consul", "firestore"}
}
// TODO: make this lazy initialization instead
func (v *Viper) resetEncoding() {
encoderRegistry := encoding.NewEncoderRegistry()
decoderRegistry := encoding.NewDecoderRegistry()
{
codec := yaml.Codec{}
encoderRegistry.RegisterEncoder("yaml", codec)
decoderRegistry.RegisterDecoder("yaml", codec)
encoderRegistry.RegisterEncoder("yml", codec)
decoderRegistry.RegisterDecoder("yml", codec)
}
{
codec := json.Codec{}
encoderRegistry.RegisterEncoder("json", codec)
decoderRegistry.RegisterDecoder("json", codec)
}
{
codec := toml.Codec{}
encoderRegistry.RegisterEncoder("toml", codec)
decoderRegistry.RegisterDecoder("toml", codec)
}
{
codec := hcl.Codec{}
encoderRegistry.RegisterEncoder("hcl", codec)
decoderRegistry.RegisterDecoder("hcl", codec)
encoderRegistry.RegisterEncoder("tfvars", codec)
decoderRegistry.RegisterDecoder("tfvars", codec)
}
{
codec := ini.Codec{
KeyDelimiter: v.keyDelim,
LoadOptions: v.iniLoadOptions,
}
encoderRegistry.RegisterEncoder("ini", codec)
decoderRegistry.RegisterDecoder("ini", codec)
}
{
codec := &javaproperties.Codec{
KeyDelimiter: v.keyDelim,
}
encoderRegistry.RegisterEncoder("properties", codec)
decoderRegistry.RegisterDecoder("properties", codec)
encoderRegistry.RegisterEncoder("props", codec)
decoderRegistry.RegisterDecoder("props", codec)
encoderRegistry.RegisterEncoder("prop", codec)
decoderRegistry.RegisterDecoder("prop", codec)
}
{
codec := &dotenv.Codec{}
encoderRegistry.RegisterEncoder("dotenv", codec)
decoderRegistry.RegisterDecoder("dotenv", codec)
encoderRegistry.RegisterEncoder("env", codec)
decoderRegistry.RegisterDecoder("env", codec)
}
v.encoderRegistry = encoderRegistry
v.decoderRegistry = decoderRegistry
}
type defaultRemoteProvider struct {
provider string
endpoint string
@ -433,7 +476,7 @@ func (v *Viper) WatchConfig() {
v.onConfigChange(event)
}
} else if filepath.Clean(event.Name) == configFile &&
event.Op&fsnotify.Remove&fsnotify.Remove != 0 {
event.Op&fsnotify.Remove != 0 {
eventsWG.Done()
return
}
@ -1634,53 +1677,11 @@ func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error {
buf.ReadFrom(in)
switch format := strings.ToLower(v.getConfigType()); format {
case "yaml", "yml", "json", "toml", "hcl", "tfvars":
err := decoderRegistry.Decode(format, buf.Bytes(), &c)
case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "properties", "props", "prop", "dotenv", "env":
err := v.decoderRegistry.Decode(format, buf.Bytes(), c)
if err != nil {
return ConfigParseError{err}
}
case "dotenv", "env":
env, err := gotenv.StrictParse(buf)
if err != nil {
return ConfigParseError{err}
}
for k, v := range env {
c[k] = v
}
case "properties", "props", "prop":
v.properties = properties.NewProperties()
var err error
if v.properties, err = properties.Load(buf.Bytes(), properties.UTF8); err != nil {
return ConfigParseError{err}
}
for _, key := range v.properties.Keys() {
value, _ := v.properties.Get(key)
// recursively build nested maps
path := strings.Split(key, ".")
lastKey := strings.ToLower(path[len(path)-1])
deepestMap := deepSearch(c, path[0:len(path)-1])
// set innermost value
deepestMap[lastKey] = value
}
case "ini":
cfg := ini.Empty(v.iniLoadOptions)
err := cfg.Append(buf.Bytes())
if err != nil {
return ConfigParseError{err}
}
sections := cfg.Sections()
for i := 0; i < len(sections); i++ {
section := sections[i]
keys := section.Keys()
for j := 0; j < len(keys); j++ {
key := keys[j]
value := cfg.Section(section.Name()).Key(key.Name()).String()
c[section.Name()+"."+key.Name()] = value
}
}
}
insensitiviseMap(c)
@ -1691,8 +1692,8 @@ func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error {
func (v *Viper) marshalWriter(f afero.File, configType string) error {
c := v.AllSettings()
switch configType {
case "yaml", "yml", "json", "toml", "hcl", "tfvars":
b, err := encoderRegistry.Encode(configType, c)
case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "prop", "props", "properties", "dotenv", "env":
b, err := v.encoderRegistry.Encode(configType, c)
if err != nil {
return ConfigMarshalError{err}
}
@ -1701,50 +1702,6 @@ func (v *Viper) marshalWriter(f afero.File, configType string) error {
if err != nil {
return ConfigMarshalError{err}
}
case "prop", "props", "properties":
if v.properties == nil {
v.properties = properties.NewProperties()
}
p := v.properties
for _, key := range v.AllKeys() {
_, _, err := p.Set(key, v.GetString(key))
if err != nil {
return ConfigMarshalError{err}
}
}
_, err := p.WriteComment(f, "#", properties.UTF8)
if err != nil {
return ConfigMarshalError{err}
}
case "dotenv", "env":
lines := []string{}
for _, key := range v.AllKeys() {
envName := strings.ToUpper(strings.Replace(key, ".", "_", -1))
val := v.Get(key)
lines = append(lines, fmt.Sprintf("%v=%v", envName, val))
}
s := strings.Join(lines, "\n")
if _, err := f.WriteString(s); err != nil {
return ConfigMarshalError{err}
}
case "ini":
keys := v.AllKeys()
cfg := ini.Empty()
ini.PrettyFormat = false
for i := 0; i < len(keys); i++ {
key := keys[i]
lastSep := strings.LastIndex(key, ".")
sectionName := key[:(lastSep)]
keyName := key[(lastSep + 1):]
if sectionName == "default" {
sectionName = ""
}
cfg.Section(sectionName).Key(keyName).SetValue(v.GetString(key))
}
cfg.WriteTo(f)
}
return nil
}
@ -1761,7 +1718,8 @@ func keyExists(k string, m map[string]interface{}) string {
}
func castToMapStringInterface(
src map[interface{}]interface{}) map[string]interface{} {
src map[interface{}]interface{},
) map[string]interface{} {
tgt := map[string]interface{}{}
for k, v := range src {
tgt[fmt.Sprintf("%v", k)] = v
@ -1799,7 +1757,8 @@ func castMapFlagToMapInterface(src map[string]FlagValue) map[string]interface{}
// deep. Both map types are supported as there is a go-yaml fork that uses
// `map[string]interface{}` instead.
func mergeMaps(
src, tgt map[string]interface{}, itgt map[interface{}]interface{}) {
src, tgt map[string]interface{}, itgt map[interface{}]interface{},
) {
for sk, sv := range src {
tk := keyExists(sk, tgt)
if tk == "" {
@ -1823,17 +1782,6 @@ func mergeMaps(
svType := reflect.TypeOf(sv)
tvType := reflect.TypeOf(tv)
if tvType != nil && svType != tvType { // Allow for the target to be nil
v.logger.Error(
"svType != tvType",
"key", sk,
"st", svType,
"tt", tvType,
"sv", sv,
"tv", tv,
)
continue
}
v.logger.Trace(
"processing",
@ -1847,13 +1795,27 @@ func mergeMaps(
switch ttv := tv.(type) {
case map[interface{}]interface{}:
v.logger.Trace("merging maps (must convert)")
tsv := sv.(map[interface{}]interface{})
tsv, ok := sv.(map[interface{}]interface{})
if !ok {
v.logger.Error(
"Could not cast sv to map[interface{}]interface{}; key=%s, st=%v, tt=%v, sv=%v, tv=%v",
sk, svType, tvType, sv, tv)
continue
}
ssv := castToMapStringInterface(tsv)
stv := castToMapStringInterface(ttv)
mergeMaps(ssv, stv, ttv)
case map[string]interface{}:
v.logger.Trace("merging maps")
mergeMaps(sv.(map[string]interface{}), ttv, nil)
tsv, ok := sv.(map[string]interface{})
if !ok {
v.logger.Error(
"Could not cast sv to map[string]interface{}; key=%s, st=%v, tt=%v, sv=%v, tv=%v",
sk, svType, tvType, sv, tv)
continue
}
mergeMaps(tsv, ttv, nil)
default:
v.logger.Trace("setting value")
tgt[tk] = sv