diff --git a/commands/command_clone.go b/commands/command_clone.go index 80536261..a2db0ce9 100644 --- a/commands/command_clone.go +++ b/commands/command_clone.go @@ -72,13 +72,9 @@ func cloneCommand(cmd *cobra.Command, args []string) { requireInRepo() - // Now just call pull with default args // Support --origin option to clone - var remote string if len(cloneFlags.Origin) > 0 { - remote = cloneFlags.Origin - } else { - remote = "origin" + cfg.SetRemote(cloneFlags.Origin) } if ref, err := git.CurrentRef(); err == nil { @@ -86,10 +82,9 @@ func cloneCommand(cmd *cobra.Command, args []string) { filter := buildFilepathFilter(cfg, includeArg, excludeArg) if cloneFlags.NoCheckout || cloneFlags.Bare { // If --no-checkout or --bare then we shouldn't check out, just fetch instead - cfg.CurrentRemote = remote fetchRef(ref.Name, filter) } else { - pull(remote, filter) + pull(filter) err := postCloneSubmodules(args) if err != nil { Exit("Error performing 'git lfs pull' for submodules: %v", err) diff --git a/commands/command_env.go b/commands/command_env.go index cc1090ec..d4633189 100644 --- a/commands/command_env.go +++ b/commands/command_env.go @@ -9,7 +9,6 @@ import ( func envCommand(cmd *cobra.Command, args []string) { config.ShowConfigWarnings = true - endpoint := getAPIClient().Endpoints.Endpoint("download", cfg.CurrentRemote) gitV, err := git.Version() if err != nil { @@ -20,11 +19,14 @@ func envCommand(cmd *cobra.Command, args []string) { Print(gitV) Print("") - if len(endpoint.Url) > 0 { - access := getAPIClient().Endpoints.AccessFor(endpoint.Url) - Print("Endpoint=%s (auth=%s)", endpoint.Url, access) - if len(endpoint.SshUserAndHost) > 0 { - Print(" SSH=%s:%s", endpoint.SshUserAndHost, endpoint.SshPath) + if cfg.IsDefaultRemote() { + endpoint := getAPIClient().Endpoints.Endpoint("download", cfg.Remote()) + if len(endpoint.Url) > 0 { + access := getAPIClient().Endpoints.AccessFor(endpoint.Url) + Print("Endpoint=%s (auth=%s)", endpoint.Url, access) + if len(endpoint.SshUserAndHost) > 0 { + Print(" SSH=%s:%s", endpoint.SshUserAndHost, endpoint.SshPath) + } } } diff --git a/commands/command_fetch.go b/commands/command_fetch.go index 1026f42e..4826146d 100644 --- a/commands/command_fetch.go +++ b/commands/command_fetch.go @@ -39,12 +39,9 @@ func fetchCommand(cmd *cobra.Command, args []string) { if len(args) > 0 { // Remote is first arg - if err := git.ValidateRemote(args[0]); err != nil { - Exit("Invalid remote name %q", args[0]) + if err := cfg.SetValidRemote(args[0]); err != nil { + Exit("Invalid remote name %q: %s", args[0], err) } - cfg.CurrentRemote = args[0] - } else { - cfg.CurrentRemote = "" } if len(args) > 1 { @@ -104,7 +101,7 @@ func fetchCommand(cmd *cobra.Command, args []string) { if !success { c := getAPIClient() - e := c.Endpoints.Endpoint("download", cfg.CurrentRemote) + e := c.Endpoints.Endpoint("download", cfg.Remote()) Exit("error: failed to fetch some objects from '%s'", e.Url) } } @@ -184,7 +181,7 @@ func fetchRecent(fetchconf lfs.FetchPruneConfig, alreadyFetchedRefs []*git.Ref, if fetchconf.FetchRecentRefsDays > 0 { Print("Fetching recent branches within %v days", fetchconf.FetchRecentRefsDays) refsSince := time.Now().AddDate(0, 0, -fetchconf.FetchRecentRefsDays) - refs, err := git.RecentBranches(refsSince, fetchconf.FetchRecentRefsIncludeRemotes, cfg.CurrentRemote) + refs, err := git.RecentBranches(refsSince, fetchconf.FetchRecentRefsIncludeRemotes, cfg.Remote()) if err != nil { Panic(err, "Could not scan for recent refs") } @@ -268,20 +265,10 @@ func scanAll() 
[]*lfs.WrappedPointer { // Fetch and report completion of each OID to a channel (optional, pass nil to skip) // Returns true if all completed with no errors, false if errors were written to stderr/log func fetchAndReportToChan(allpointers []*lfs.WrappedPointer, filter *filepathfilter.Filter, out chan<- *lfs.WrappedPointer) bool { - // Lazily initialize the current remote. - if len(cfg.CurrentRemote) == 0 { - // Actively find the default remote, don't just assume origin - defaultRemote, err := cfg.GitConfig().DefaultRemote() - if err != nil { - Exit("No default remote") - } - cfg.CurrentRemote = defaultRemote - } - ready, pointers, meter := readyAndMissingPointers(allpointers, filter) q := newDownloadQueue( - getTransferManifestOperationRemote("download", cfg.CurrentRemote), - cfg.CurrentRemote, tq.WithProgress(meter), + getTransferManifestOperationRemote("download", cfg.Remote()), + cfg.Remote(), tq.WithProgress(meter), ) if out != nil { diff --git a/commands/command_filter_process.go b/commands/command_filter_process.go index 56554fa2..16373bf9 100644 --- a/commands/command_filter_process.go +++ b/commands/command_filter_process.go @@ -69,8 +69,8 @@ func filterCommand(cmd *cobra.Command, args []string) { if supportsDelay { q = tq.NewTransferQueue( tq.Download, - getTransferManifestOperationRemote("download", cfg.CurrentRemote), - cfg.CurrentRemote, + getTransferManifestOperationRemote("download", cfg.Remote()), + cfg.Remote(), ) go infiniteTransferBuffer(q, available) } diff --git a/commands/command_lock.go b/commands/command_lock.go index 51a2d619..6043633d 100644 --- a/commands/command_lock.go +++ b/commands/command_lock.go @@ -28,7 +28,11 @@ func lockCommand(cmd *cobra.Command, args []string) { Exit(err.Error()) } - lockClient := newLockClient(lockRemote) + if len(lockRemote) > 0 { + cfg.SetRemote(lockRemote) + } + + lockClient := newLockClient() defer lockClient.Close() lock, err := lockClient.LockFile(path) @@ -90,7 +94,7 @@ func lockPath(file string) (string, error) { func init() { RegisterCommand("lock", lockCommand, func(cmd *cobra.Command) { - cmd.Flags().StringVarP(&lockRemote, "remote", "r", cfg.CurrentRemote, lockRemoteHelp) + cmd.Flags().StringVarP(&lockRemote, "remote", "r", "", lockRemoteHelp) cmd.Flags().BoolVarP(&locksCmdFlags.JSON, "json", "", false, "print output in json") }) } diff --git a/commands/command_locks.go b/commands/command_locks.go index ed49d143..fa3d48ef 100644 --- a/commands/command_locks.go +++ b/commands/command_locks.go @@ -22,7 +22,11 @@ func locksCommand(cmd *cobra.Command, args []string) { Exit("Error building filters: %v", err) } - lockClient := newLockClient(lockRemote) + if len(lockRemote) > 0 { + cfg.SetRemote(lockRemote) + } + + lockClient := newLockClient() defer lockClient.Close() locks, err := lockClient.SearchLocks(filters, locksCmdFlags.Limit, locksCmdFlags.Local) @@ -109,7 +113,7 @@ func (l *locksFlags) Filters() (map[string]string, error) { func init() { RegisterCommand("locks", locksCommand, func(cmd *cobra.Command) { - cmd.Flags().StringVarP(&lockRemote, "remote", "r", cfg.CurrentRemote, lockRemoteHelp) + cmd.Flags().StringVarP(&lockRemote, "remote", "r", "", lockRemoteHelp) cmd.Flags().StringVarP(&locksCmdFlags.Path, "path", "p", "", "filter locks results matching a particular path") cmd.Flags().StringVarP(&locksCmdFlags.Id, "id", "i", "", "filter locks results matching a particular ID") cmd.Flags().IntVarP(&locksCmdFlags.Limit, "limit", "l", 0, "optional limit for number of results to return") diff --git a/commands/command_migrate.go 
b/commands/command_migrate.go index c15e8185..29fe8542 100644 --- a/commands/command_migrate.go +++ b/commands/command_migrate.go @@ -1,6 +1,7 @@ package commands import ( + "fmt" "path/filepath" "strings" @@ -20,6 +21,11 @@ var ( // in the migration. migrateExcludeRefs []string + // migrateSkipFetch assumes that the client has the latest copy of + // remote references, and thus should not contact the remote for a set + // of updated references. + migrateSkipFetch bool + // migrateEverything indicates the presence of the --everything flag, // and instructs 'git lfs migrate' to migrate all local references. migrateEverything bool @@ -122,7 +128,7 @@ func includeExcludeRefs(l *log.Logger, args []string) (include, exclude []string return nil, nil, err } - include = append(include, ref.Name) + include = append(include, ref.Refspec()) } if hardcore { @@ -142,7 +148,7 @@ func includeExcludeRefs(l *log.Logger, args []string) (include, exclude []string } for _, ref := range localRefs { - include = append(include, ref.Name) + include = append(include, ref.Refspec()) } } else { // Otherwise, if neither --include-ref= or @@ -154,7 +160,9 @@ func includeExcludeRefs(l *log.Logger, args []string) (include, exclude []string return nil, nil, err } - exclude = append(exclude, remoteRefs...) + for _, rr := range remoteRefs { + exclude = append(exclude, rr.Refspec()) + } } return include, exclude, nil @@ -163,28 +171,40 @@ func includeExcludeRefs(l *log.Logger, args []string) (include, exclude []string // getRemoteRefs returns a fully qualified set of references belonging to all // remotes known by the currently checked-out repository, or an error if those // references could not be determined. -func getRemoteRefs(l *log.Logger) ([]string, error) { - var refs []string +func getRemoteRefs(l *log.Logger) ([]*git.Ref, error) { + var refs []*git.Ref remotes, err := git.RemoteList() if err != nil { return nil, err } - w := l.Waiter("migrate: Fetching remote refs") - if err := git.Fetch(remotes...); err != nil { - return nil, err + if !migrateSkipFetch { + w := l.Waiter("migrate: Fetching remote refs") + if err := git.Fetch(remotes...); err != nil { + return nil, err + } + w.Complete() } - w.Complete() for _, remote := range remotes { - refsForRemote, err := git.RemoteRefs(remote) + var refsForRemote []*git.Ref + if migrateSkipFetch { + refsForRemote, err = git.CachedRemoteRefs(remote) + } else { + refsForRemote, err = git.RemoteRefs(remote) + } + if err != nil { return nil, err } - for _, ref := range refsForRemote { - refs = append(refs, formatRefName(ref, remote)) + for _, rr := range refsForRemote { + // HACK(@ttaylorr): add remote name to fully-qualify + // references: + rr.Name = fmt.Sprintf("%s/%s", remote, rr.Name) + + refs = append(refs, rr) } } @@ -252,6 +272,7 @@ func init() { cmd.PersistentFlags().StringSliceVar(&migrateIncludeRefs, "include-ref", nil, "An explicit list of refs to include") cmd.PersistentFlags().StringSliceVar(&migrateExcludeRefs, "exclude-ref", nil, "An explicit list of refs to exclude") cmd.PersistentFlags().BoolVar(&migrateEverything, "everything", false, "Migrate all local references") + cmd.PersistentFlags().BoolVar(&migrateSkipFetch, "skip-fetch", false, "Assume up-to-date remote references.") cmd.AddCommand(importCmd, info) }) diff --git a/commands/command_post_checkout.go b/commands/command_post_checkout.go index 4fd772fd..0057d6cc 100644 --- a/commands/command_post_checkout.go +++ b/commands/command_post_checkout.go @@ -32,7 +32,7 @@ func postCheckoutCommand(cmd 
*cobra.Command, args []string) { requireGitVersion() - lockClient := newLockClient(cfg.CurrentRemote) + lockClient := newLockClient() // Skip this hook if no lockable patterns have been configured if len(lockClient.GetLockablePatterns()) == 0 { diff --git a/commands/command_post_commit.go b/commands/command_post_commit.go index 0ea2c731..64d567fe 100644 --- a/commands/command_post_commit.go +++ b/commands/command_post_commit.go @@ -24,7 +24,7 @@ func postCommitCommand(cmd *cobra.Command, args []string) { requireGitVersion() - lockClient := newLockClient(cfg.CurrentRemote) + lockClient := newLockClient() // Skip this hook if no lockable patterns have been configured if len(lockClient.GetLockablePatterns()) == 0 { diff --git a/commands/command_post_merge.go b/commands/command_post_merge.go index 485a1614..03d4392b 100644 --- a/commands/command_post_merge.go +++ b/commands/command_post_merge.go @@ -24,7 +24,7 @@ func postMergeCommand(cmd *cobra.Command, args []string) { requireGitVersion() - lockClient := newLockClient(cfg.CurrentRemote) + lockClient := newLockClient() // Skip this hook if no lockable patterns have been configured if len(lockClient.GetLockablePatterns()) == 0 { diff --git a/commands/command_pre_push.go b/commands/command_pre_push.go index 83d9babd..a5a0b280 100644 --- a/commands/command_pre_push.go +++ b/commands/command_pre_push.go @@ -2,6 +2,7 @@ package commands import ( "bufio" + "io" "os" "strings" @@ -46,59 +47,59 @@ func prePushCommand(cmd *cobra.Command, args []string) { requireGitVersion() // Remote is first arg - if err := git.ValidateRemote(args[0]); err != nil { - Exit("Invalid remote name %q", args[0]) + if err := cfg.SetValidRemote(args[0]); err != nil { + Exit("Invalid remote name %q: %s", args[0], err) } - ctx := newUploadContext(args[0], prePushDryRun) - - gitscanner, err := ctx.buildGitScanner() - if err != nil { + ctx := newUploadContext(prePushDryRun) + updates := prePushRefs(os.Stdin) + if err := uploadForRefUpdates(ctx, updates, false); err != nil { ExitWithError(err) } - defer gitscanner.Close() +} + +// prePushRefs parses commit information that the pre-push git hook receives: +// +// +// +// Each line describes a proposed update of the remote ref at the remote sha to +// the local sha. Multiple updates can be received on multiple lines (such as +// from 'git push --all'). These updates are typically received over STDIN. +func prePushRefs(r io.Reader) []*refUpdate { + scanner := bufio.NewScanner(r) + refs := make([]*refUpdate, 0, 1) // We can be passed multiple lines of refs - scanner := bufio.NewScanner(os.Stdin) - for scanner.Scan() { line := strings.TrimSpace(scanner.Text()) - if len(line) == 0 { continue } tracerx.Printf("pre-push: %s", line) - left, _ := decodeRefs(line) - if left == prePushDeleteBranch { + left, right := decodeRefs(line) + if left.Sha == prePushDeleteBranch { continue } - if err := uploadLeftOrAll(gitscanner, ctx, left); err != nil { - Print("Error scanning for Git LFS files in %q", left) - ExitWithError(err) - } + refs = append(refs, newRefUpdate(cfg.Git, cfg.PushRemote(), left, right)) } - ctx.Await() + return refs } // decodeRefs pulls the sha1s out of the line read from the pre-push // hook's stdin. 
-func decodeRefs(input string) (string, string) { +func decodeRefs(input string) (*git.Ref, *git.Ref) { refs := strings.Split(strings.TrimSpace(input), " ") - var left, right string - - if len(refs) > 1 { - left = refs[1] + for len(refs) < 4 { + refs = append(refs, "") } - if len(refs) > 3 { - right = "^" + refs[3] - } - - return left, right + leftRef := git.ParseRef(refs[0], refs[1]) + rightRef := git.ParseRef(refs[2], refs[3]) + return leftRef, rightRef } func init() { diff --git a/commands/command_pull.go b/commands/command_pull.go index 2a5ee179..ae261b11 100644 --- a/commands/command_pull.go +++ b/commands/command_pull.go @@ -18,29 +18,19 @@ func pullCommand(cmd *cobra.Command, args []string) { requireGitVersion() requireInRepo() - var remote string if len(args) > 0 { // Remote is first arg - if err := git.ValidateRemote(args[0]); err != nil { - Panic(err, fmt.Sprintf("Invalid remote name '%v'", args[0])) + if err := cfg.SetValidRemote(args[0]); err != nil { + Exit("Invalid remote name %q: %s", args[0], err) } - remote = args[0] - } else { - // Actively find the default remote, don't just assume origin - defaultRemote, err := cfg.GitConfig().DefaultRemote() - if err != nil { - Panic(err, "No default remote") - } - remote = defaultRemote } includeArg, excludeArg := getIncludeExcludeArgs(cmd) filter := buildFilepathFilter(cfg, includeArg, excludeArg) - pull(remote, filter) + pull(filter) } -func pull(remote string, filter *filepathfilter.Filter) { - cfg.CurrentRemote = remote +func pull(filter *filepathfilter.Filter) { ref, err := git.CurrentRef() if err != nil { Panic(err, "Could not pull") @@ -48,6 +38,7 @@ func pull(remote string, filter *filepathfilter.Filter) { pointers := newPointerMap() meter := progress.NewMeter(progress.WithOSEnv(cfg.Os)) + remote := cfg.Remote() singleCheckout := newSingleCheckout(cfg.Git, remote) q := newDownloadQueue(singleCheckout.Manifest(), remote, tq.WithProgress(meter)) gitscanner := lfs.NewGitScanner(func(p *lfs.WrappedPointer, err error) { diff --git a/commands/command_push.go b/commands/command_push.go index 838e6223..564287eb 100644 --- a/commands/command_push.go +++ b/commands/command_push.go @@ -19,82 +19,6 @@ var ( // shares some global vars and functions with command_pre_push.go ) -func uploadsBetweenRefAndRemote(ctx *uploadContext, refnames []string) { - tracerx.Printf("Upload refs %v to remote %v", refnames, ctx.Remote) - - gitscanner, err := ctx.buildGitScanner() - if err != nil { - ExitWithError(err) - } - defer gitscanner.Close() - - refs, err := refsByNames(refnames) - if err != nil { - Error(err.Error()) - Exit("Error getting local refs.") - } - - for _, ref := range refs { - if err = uploadLeftOrAll(gitscanner, ctx, ref.Name); err != nil { - Print("Error scanning for Git LFS files in the %q ref", ref.Name) - ExitWithError(err) - } - } - - ctx.Await() -} - -func uploadsWithObjectIDs(ctx *uploadContext, oids []string) { - for _, oid := range oids { - mp, err := ctx.gitfilter.ObjectPath(oid) - if err != nil { - ExitWithError(errors.Wrap(err, "Unable to find local media path:")) - } - - stat, err := os.Stat(mp) - if err != nil { - ExitWithError(errors.Wrap(err, "Unable to stat local media path")) - } - - uploadPointers(ctx, &lfs.WrappedPointer{ - Name: mp, - Pointer: &lfs.Pointer{ - Oid: oid, - Size: stat.Size(), - }, - }) - } - - ctx.Await() -} - -func refsByNames(refnames []string) ([]*git.Ref, error) { - localrefs, err := git.LocalRefs() - if err != nil { - return nil, err - } - - if pushAll && len(refnames) == 0 { - return localrefs, nil 
- } - - reflookup := make(map[string]*git.Ref, len(localrefs)) - for _, ref := range localrefs { - reflookup[ref.Name] = ref - } - - refs := make([]*git.Ref, len(refnames)) - for i, name := range refnames { - if ref, ok := reflookup[name]; ok { - refs[i] = ref - } else { - refs[i] = &git.Ref{Name: name, Type: git.RefTypeOther, Sha: name} - } - } - - return refs, nil -} - // pushCommand pushes local objects to a Git LFS server. It takes two // arguments: // @@ -113,12 +37,11 @@ func pushCommand(cmd *cobra.Command, args []string) { requireGitVersion() // Remote is first arg - if err := git.ValidateRemote(args[0]); err != nil { - Exit("Invalid remote name %q", args[0]) + if err := cfg.SetValidRemote(args[0]); err != nil { + Exit("Invalid remote name %q: %s", args[0], err) } - ctx := newUploadContext(args[0], pushDryRun) - + ctx := newUploadContext(pushDryRun) if pushObjectIDs { if len(args) < 2 { Print("Usage: git lfs push --object-id [lfs-object-id] ...") @@ -136,6 +59,81 @@ func pushCommand(cmd *cobra.Command, args []string) { } } +func uploadsBetweenRefAndRemote(ctx *uploadContext, refnames []string) { + tracerx.Printf("Upload refs %v to remote %v", refnames, ctx.Remote) + + updates, err := lfsPushRefs(refnames, pushAll) + if err != nil { + Error(err.Error()) + Exit("Error getting local refs.") + } + + if err := uploadForRefUpdates(ctx, updates, pushAll); err != nil { + ExitWithError(err) + } +} + +func uploadsWithObjectIDs(ctx *uploadContext, oids []string) { + pointers := make([]*lfs.WrappedPointer, len(oids)) + for i, oid := range oids { + mp, err := ctx.gitfilter.ObjectPath(oid) + if err != nil { + ExitWithError(errors.Wrap(err, "Unable to find local media path:")) + } + + stat, err := os.Stat(mp) + if err != nil { + ExitWithError(errors.Wrap(err, "Unable to stat local media path")) + } + + pointers[i] = &lfs.WrappedPointer{ + Name: mp, + Pointer: &lfs.Pointer{ + Oid: oid, + Size: stat.Size(), + }, + } + } + + uploadPointers(ctx, pointers...) + ctx.Await() +} + +// lfsPushRefs returns valid ref updates from the given ref and --all arguments. +// Either one or more refs can be explicitly specified, or --all indicates all +// local refs are pushed. 
+func lfsPushRefs(refnames []string, pushAll bool) ([]*refUpdate, error) { + localrefs, err := git.LocalRefs() + if err != nil { + return nil, err + } + + if pushAll && len(refnames) == 0 { + refs := make([]*refUpdate, len(localrefs)) + for i, lr := range localrefs { + refs[i] = newRefUpdate(cfg.Git, cfg.PushRemote(), lr, nil) + } + return refs, nil + } + + reflookup := make(map[string]*git.Ref, len(localrefs)) + for _, ref := range localrefs { + reflookup[ref.Name] = ref + } + + refs := make([]*refUpdate, len(refnames)) + for i, name := range refnames { + if left, ok := reflookup[name]; ok { + refs[i] = newRefUpdate(cfg.Git, cfg.PushRemote(), left, nil) + } else { + left := &git.Ref{Name: name, Type: git.RefTypeOther, Sha: name} + refs[i] = newRefUpdate(cfg.Git, cfg.PushRemote(), left, nil) + } + } + + return refs, nil +} + func init() { RegisterCommand("push", pushCommand, func(cmd *cobra.Command) { cmd.Flags().BoolVarP(&pushDryRun, "dry-run", "d", false, "Do everything except actually send the updates") diff --git a/commands/command_track.go b/commands/command_track.go index 057fb635..3472f9c3 100644 --- a/commands/command_track.go +++ b/commands/command_track.go @@ -205,7 +205,7 @@ ArgsLoop: } // now flip read-only mode based on lockable / not lockable changes - lockClient := newLockClient(cfg.CurrentRemote) + lockClient := newLockClient() err = lockClient.FixFileWriteFlagsInDir(relpath, readOnlyPatterns, writeablePatterns) if err != nil { LoggedError(err, "Error changing lockable file permissions: %s", err) diff --git a/commands/command_unlock.go b/commands/command_unlock.go index 0ca2e382..3334bf69 100644 --- a/commands/command_unlock.go +++ b/commands/command_unlock.go @@ -35,7 +35,11 @@ func unlockCommand(cmd *cobra.Command, args []string) { Exit(unlockUsage) } - lockClient := newLockClient(lockRemote) + if len(lockRemote) > 0 { + cfg.SetRemote(lockRemote) + } + + lockClient := newLockClient() defer lockClient.Close() if hasPath { @@ -131,7 +135,7 @@ func unlockAbortIfFileModifiedById(id string, lockClient *locking.Client) { func init() { RegisterCommand("unlock", unlockCommand, func(cmd *cobra.Command) { - cmd.Flags().StringVarP(&lockRemote, "remote", "r", cfg.CurrentRemote, lockRemoteHelp) + cmd.Flags().StringVarP(&lockRemote, "remote", "r", "", lockRemoteHelp) cmd.Flags().StringVarP(&unlockCmdFlags.Id, "id", "i", "", "unlock a lock by its ID") cmd.Flags().BoolVarP(&unlockCmdFlags.Force, "force", "f", false, "forcibly break another user's lock(s)") cmd.Flags().BoolVarP(&locksCmdFlags.JSON, "json", "", false, "print output in json") diff --git a/commands/commands.go b/commands/commands.go index 887b817f..35940a85 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -90,8 +90,8 @@ func closeAPIClient() error { return apiClient.Close() } -func newLockClient(remote string) *locking.Client { - lockClient, err := locking.NewClient(remote, getAPIClient()) +func newLockClient() *locking.Client { + lockClient, err := locking.NewClient(cfg.PushRemote(), getAPIClient()) if err == nil { os.MkdirAll(cfg.LFSStorageDir(), 0755) err = lockClient.SetupFileCache(cfg.LFSStorageDir()) diff --git a/commands/lockverifier.go b/commands/lockverifier.go new file mode 100644 index 00000000..80fa8855 --- /dev/null +++ b/commands/lockverifier.go @@ -0,0 +1,239 @@ +package commands + +import ( + "fmt" + "sort" + "strconv" + "strings" + + "github.com/git-lfs/git-lfs/config" + "github.com/git-lfs/git-lfs/errors" + "github.com/git-lfs/git-lfs/git" + "github.com/git-lfs/git-lfs/lfsapi" + 
"github.com/git-lfs/git-lfs/locking" + "github.com/git-lfs/git-lfs/tq" +) + +type verifyState byte + +const ( + verifyStateUnknown verifyState = iota + verifyStateEnabled + verifyStateDisabled +) + +func verifyLocksForUpdates(lv *lockVerifier, updates []*refUpdate) { + for _, update := range updates { + lv.Verify(update.Right()) + } +} + +// lockVerifier verifies locked files before updating one or more refs. +type lockVerifier struct { + endpoint lfsapi.Endpoint + verifyState verifyState + verifiedRefs map[string]bool + + // all existing locks + ourLocks map[string]*refLock + theirLocks map[string]*refLock + + // locks from ourLocks that have been modified + ownedLocks []*refLock + + // locks from theirLocks that have been modified + unownedLocks []*refLock +} + +func (lv *lockVerifier) Verify(ref *git.Ref) { + if lv.verifyState == verifyStateDisabled || lv.verifiedRefs[ref.Refspec()] { + return + } + + lockClient := newLockClient() + ours, theirs, err := lockClient.VerifiableLocks(ref, 0) + if err != nil { + if errors.IsNotImplementedError(err) { + disableFor(lv.endpoint.Url) + } else if lv.verifyState == verifyStateUnknown || lv.verifyState == verifyStateEnabled { + if errors.IsAuthError(err) { + if lv.verifyState == verifyStateUnknown { + Error("WARNING: Authentication error: %s", err) + } else if lv.verifyState == verifyStateEnabled { + Exit("ERROR: Authentication error: %s", err) + } + } else { + Print("Remote %q does not support the LFS locking API. Consider disabling it with:", cfg.PushRemote()) + Print(" $ git config lfs.%s.locksverify false", lv.endpoint.Url) + if lv.verifyState == verifyStateEnabled { + ExitWithError(err) + } + } + } + } else if lv.verifyState == verifyStateUnknown { + Print("Locking support detected on remote %q. Consider enabling it with:", cfg.PushRemote()) + Print(" $ git config lfs.%s.locksverify true", lv.endpoint.Url) + } + + lv.addLocks(ref, ours, lv.ourLocks) + lv.addLocks(ref, theirs, lv.theirLocks) + lv.verifiedRefs[ref.Refspec()] = true +} + +func (lv *lockVerifier) addLocks(ref *git.Ref, locks []locking.Lock, set map[string]*refLock) { + for _, l := range locks { + if rl, ok := set[l.Path]; ok { + if err := rl.Add(ref, l); err != nil { + Error("WARNING: error adding %q lock for ref %q: %+v", l.Path, ref, err) + } + } else { + set[l.Path] = lv.newRefLocks(ref, l) + } + } +} + +// Determines if a filename is lockable. 
Implements lfs.GitScannerSet +func (lv *lockVerifier) Contains(name string) bool { + if lv == nil { + return false + } + _, ok := lv.theirLocks[name] + return ok +} + +func (lv *lockVerifier) LockedByThem(name string) bool { + if lock, ok := lv.theirLocks[name]; ok { + lv.unownedLocks = append(lv.unownedLocks, lock) + return true + } + return false +} + +func (lv *lockVerifier) LockedByUs(name string) bool { + if lock, ok := lv.ourLocks[name]; ok { + lv.ownedLocks = append(lv.ownedLocks, lock) + return true + } + return false +} + +func (lv *lockVerifier) UnownedLocks() []*refLock { + return lv.unownedLocks +} + +func (lv *lockVerifier) HasUnownedLocks() bool { + return len(lv.unownedLocks) > 0 +} + +func (lv *lockVerifier) OwnedLocks() []*refLock { + return lv.ownedLocks +} + +func (lv *lockVerifier) HasOwnedLocks() bool { + return len(lv.ownedLocks) > 0 +} + +func (lv *lockVerifier) Enabled() bool { + return lv.verifyState == verifyStateEnabled +} + +func (lv *lockVerifier) newRefLocks(ref *git.Ref, l locking.Lock) *refLock { + return &refLock{ + allRefs: lv.verifiedRefs, + path: l.Path, + refs: map[*git.Ref]locking.Lock{ref: l}, + } +} + +func newLockVerifier(m *tq.Manifest) *lockVerifier { + lv := &lockVerifier{ + endpoint: getAPIClient().Endpoints.Endpoint("upload", cfg.PushRemote()), + verifiedRefs: make(map[string]bool), + ourLocks: make(map[string]*refLock), + theirLocks: make(map[string]*refLock), + } + + // Do not check locks for standalone transfer, because there is no LFS + // server to ask. + if m.IsStandaloneTransfer() { + lv.verifyState = verifyStateDisabled + } else { + lv.verifyState = getVerifyStateFor(lv.endpoint.Url) + } + + return lv +} + +// refLock represents a unique locked file path, potentially across multiple +// refs. It tracks each individual lock in case different users locked the +// same path across multiple refs. +type refLock struct { + path string + allRefs map[string]bool + refs map[*git.Ref]locking.Lock +} + +// Path returns the locked path. +func (r *refLock) Path() string { + return r.path +} + +// Owners returns the list of owners that locked this file, including what +// specific refs the files were locked in. If a user locked a file on all refs, +// don't bother listing them. +// +// Example: technoweenie, bob (refs: foo) +func (r *refLock) Owners() string { + users := make(map[string][]string, len(r.refs)) + for ref, lock := range r.refs { + u := lock.Owner.Name + if _, ok := users[u]; !ok { + users[u] = make([]string, 0, len(r.refs)) + } + users[u] = append(users[u], ref.Name) + } + + owners := make([]string, 0, len(users)) + for name, refs := range users { + seenRefCount := 0 + for _, ref := range refs { + if r.allRefs[ref] { + seenRefCount++ + } + } + if seenRefCount == len(r.allRefs) { // lock is included in all refs, so don't list them + owners = append(owners, name) + continue + } + + sort.Strings(refs) + owners = append(owners, fmt.Sprintf("%s (refs: %s)", name, strings.Join(refs, ", "))) + } + sort.Strings(owners) + return strings.Join(owners, ", ") +} + +func (r *refLock) Add(ref *git.Ref, l locking.Lock) error { + r.refs[ref] = l + return nil +} + +// getVerifyStateFor returns whether or not lock verification is enabled for the +// given url. If no state has been explicitly set, an "unknown" state will be +// returned instead. 
+func getVerifyStateFor(rawurl string) verifyState { + uc := config.NewURLConfig(cfg.Git) + + v, ok := uc.Get("lfs", rawurl, "locksverify") + if !ok { + if supportsLockingAPI(rawurl) { + return verifyStateEnabled + } + return verifyStateUnknown + } + + if enabled, _ := strconv.ParseBool(v); enabled { + return verifyStateEnabled + } + return verifyStateDisabled +} diff --git a/commands/refs.go b/commands/refs.go new file mode 100644 index 00000000..bc57fff9 --- /dev/null +++ b/commands/refs.go @@ -0,0 +1,91 @@ +package commands + +import ( + "fmt" + + "github.com/git-lfs/git-lfs/config" + "github.com/git-lfs/git-lfs/git" + "github.com/rubyist/tracerx" +) + +type refUpdate struct { + git config.Environment + remote string + left *git.Ref + right *git.Ref +} + +func newRefUpdate(g config.Environment, remote string, l, r *git.Ref) *refUpdate { + return &refUpdate{ + git: g, + remote: remote, + left: l, + right: r, + } +} + +func (u *refUpdate) Left() *git.Ref { + return u.left +} + +func (u *refUpdate) LeftCommitish() string { + return refCommitish(u.Left()) +} + +func (u *refUpdate) Right() *git.Ref { + if u.right == nil { + u.right = defaultRemoteRef(u.git, u.remote, u.Left()) + } + return u.right +} + +// defaultRemoteRef returns the remote ref receiving a push based on the current +// repository config and local ref being pushed. +// +// See push.default rules in https://git-scm.com/docs/git-config +func defaultRemoteRef(g config.Environment, remote string, left *git.Ref) *git.Ref { + pushMode, _ := g.Get("push.default") + switch pushMode { + case "", "simple": + brRemote, _ := g.Get(fmt.Sprintf("branch.%s.remote", left.Name)) + if brRemote == remote { + // in centralized workflow, work like 'upstream' with an added safety to + // refuse to push if the upstream branch’s name is different from the + // local one. + return trackingRef(g, left) + } + + // When pushing to a remote that is different from the remote you normally + // pull from, work as current. + return left + case "upstream", "tracking": + // push the current branch back to the branch whose changes are usually + // integrated into the current branch + return trackingRef(g, left) + case "current": + // push the current branch to update a branch with the same name on the + // receiving end. 
+ return left + default: + tracerx.Printf("WARNING: %q push mode not supported", pushMode) + return left + } +} + +func trackingRef(g config.Environment, left *git.Ref) *git.Ref { + if merge, ok := g.Get(fmt.Sprintf("branch.%s.merge", left.Name)); ok { + return git.ParseRef(merge, "") + } + return left +} + +func (u *refUpdate) RightCommitish() string { + return refCommitish(u.Right()) +} + +func refCommitish(r *git.Ref) string { + if len(r.Sha) > 0 { + return r.Sha + } + return r.Name +} diff --git a/commands/refs_test.go b/commands/refs_test.go new file mode 100644 index 00000000..cc98b3d8 --- /dev/null +++ b/commands/refs_test.go @@ -0,0 +1,75 @@ +package commands + +import ( + "testing" + + "github.com/git-lfs/git-lfs/config" + "github.com/git-lfs/git-lfs/git" + "github.com/stretchr/testify/assert" +) + +func TestRefUpdateDefault(t *testing.T) { + pushModes := []string{"simple", ""} + for _, pushMode := range pushModes { + cfg := config.NewFrom(config.Values{ + Git: map[string][]string{ + "push.default": []string{pushMode}, + "branch.left.remote": []string{"ignore"}, + "branch.left.merge": []string{"me"}, + }, + }) + + u := newRefUpdate(cfg.Git, "origin", git.ParseRef("refs/heads/left", ""), nil) + assert.Equal(t, "left", u.Right().Name, "pushmode=%q", pushMode) + assert.Equal(t, git.RefTypeLocalBranch, u.Right().Type, "pushmode=%q", pushMode) + } +} + +func TestRefUpdateTrackedDefault(t *testing.T) { + pushModes := []string{"simple", "upstream", "tracking", ""} + for _, pushMode := range pushModes { + cfg := config.NewFrom(config.Values{ + Git: map[string][]string{ + "push.default": []string{pushMode}, + "branch.left.remote": []string{"origin"}, + "branch.left.merge": []string{"refs/heads/tracked"}, + }, + }) + + u := newRefUpdate(cfg.Git, "origin", git.ParseRef("refs/heads/left", ""), nil) + assert.Equal(t, "tracked", u.Right().Name, "pushmode=%s", pushMode) + assert.Equal(t, git.RefTypeLocalBranch, u.Right().Type, "pushmode=%q", pushMode) + } +} + +func TestRefUpdateCurrentDefault(t *testing.T) { + cfg := config.NewFrom(config.Values{ + Git: map[string][]string{ + "push.default": []string{"current"}, + "branch.left.remote": []string{"origin"}, + "branch.left.merge": []string{"tracked"}, + }, + }) + + u := newRefUpdate(cfg.Git, "origin", git.ParseRef("refs/heads/left", ""), nil) + assert.Equal(t, "left", u.Right().Name) + assert.Equal(t, git.RefTypeLocalBranch, u.Right().Type) +} + +func TestRefUpdateExplicitLeftAndRight(t *testing.T) { + u := newRefUpdate(nil, "", git.ParseRef("refs/heads/left", "abc123"), git.ParseRef("refs/heads/right", "def456")) + assert.Equal(t, "left", u.Left().Name) + assert.Equal(t, "abc123", u.Left().Sha) + assert.Equal(t, "abc123", u.LeftCommitish()) + assert.Equal(t, "right", u.Right().Name) + assert.Equal(t, "def456", u.Right().Sha) + assert.Equal(t, "def456", u.RightCommitish()) + + u = newRefUpdate(nil, "", git.ParseRef("refs/heads/left", ""), git.ParseRef("refs/heads/right", "")) + assert.Equal(t, "left", u.Left().Name) + assert.Equal(t, "", u.Left().Sha) + assert.Equal(t, "left", u.LeftCommitish()) + assert.Equal(t, "right", u.Right().Name) + assert.Equal(t, "", u.Right().Sha) + assert.Equal(t, "right", u.RightCommitish()) +} diff --git a/commands/uploader.go b/commands/uploader.go index 060d3cc0..307268db 100644 --- a/commands/uploader.go +++ b/commands/uploader.go @@ -5,28 +5,42 @@ import ( "net/url" "os" "path/filepath" - "strconv" "strings" "sync" - "github.com/git-lfs/git-lfs/config" "github.com/git-lfs/git-lfs/errors" 
"github.com/git-lfs/git-lfs/lfs" - "github.com/git-lfs/git-lfs/lfsapi" - "github.com/git-lfs/git-lfs/locking" "github.com/git-lfs/git-lfs/progress" "github.com/git-lfs/git-lfs/tools" "github.com/git-lfs/git-lfs/tq" "github.com/rubyist/tracerx" ) -func uploadLeftOrAll(g *lfs.GitScanner, ctx *uploadContext, ref string) error { +func uploadForRefUpdates(ctx *uploadContext, updates []*refUpdate, pushAll bool) error { + gitscanner, err := ctx.buildGitScanner() + if err != nil { + return err + } + defer gitscanner.Close() + + verifyLocksForUpdates(ctx.lockVerifier, updates) + for _, update := range updates { + if err := uploadLeftOrAll(gitscanner, ctx, update, pushAll); err != nil { + return errors.Wrap(err, fmt.Sprintf("ref %s:", update.Left().Name)) + } + } + + ctx.Await() + return nil +} + +func uploadLeftOrAll(g *lfs.GitScanner, ctx *uploadContext, update *refUpdate, pushAll bool) error { if pushAll { - if err := g.ScanRefWithDeleted(ref, nil); err != nil { + if err := g.ScanRefWithDeleted(update.LeftCommitish(), nil); err != nil { return err } } else { - if err := g.ScanLeftToRemote(ref, nil); err != nil { + if err := g.ScanLeftToRemote(update.LeftCommitish(), nil); err != nil { return err } } @@ -46,18 +60,7 @@ type uploadContext struct { committerName string committerEmail string - trackedLocksMu *sync.Mutex - - // ALL verifiable locks - lockVerifyState verifyState - ourLocks map[string]locking.Lock - theirLocks map[string]locking.Lock - - // locks from ourLocks that were modified in this push - ownedLocks []locking.Lock - - // locks from theirLocks that were modified in this push - unownedLocks []locking.Lock + lockVerifier *lockVerifier // allowMissing specifies whether pushes containing missing/corrupt // pointers should allow pushing Git blobs @@ -68,102 +71,25 @@ type uploadContext struct { errMu sync.Mutex } -// Determines if a filename is lockable. Serves as a wrapper around theirLocks -// that implements GitScannerSet. -type gitScannerLockables struct { - m map[string]locking.Lock -} - -func (l *gitScannerLockables) Contains(name string) bool { - if l == nil { - return false - } - _, ok := l.m[name] - return ok -} - -type verifyState byte - -const ( - verifyStateUnknown verifyState = iota - verifyStateEnabled - verifyStateDisabled -) - -func newUploadContext(remote string, dryRun bool) *uploadContext { - cfg.CurrentRemote = remote - +func newUploadContext(dryRun bool) *uploadContext { + remote := cfg.PushRemote() + manifest := getTransferManifestOperationRemote("upload", remote) ctx := &uploadContext{ - Remote: remote, - Manifest: getTransferManifestOperationRemote("upload", remote), - DryRun: dryRun, - uploadedOids: tools.NewStringSet(), - gitfilter: lfs.NewGitFilter(cfg), - ourLocks: make(map[string]locking.Lock), - theirLocks: make(map[string]locking.Lock), - trackedLocksMu: new(sync.Mutex), - allowMissing: cfg.Git.Bool("lfs.allowincompletepush", true), + Remote: remote, + Manifest: manifest, + DryRun: dryRun, + uploadedOids: tools.NewStringSet(), + gitfilter: lfs.NewGitFilter(cfg), + lockVerifier: newLockVerifier(manifest), + allowMissing: cfg.Git.Bool("lfs.allowincompletepush", true), } ctx.meter = buildProgressMeter(ctx.DryRun) ctx.tq = newUploadQueue(ctx.Manifest, ctx.Remote, tq.WithProgress(ctx.meter), tq.DryRun(ctx.DryRun)) ctx.committerName, ctx.committerEmail = cfg.CurrentCommitter() - - // Do not check locks for standalone transfer, because there is no LFS - // server to ask. 
- if ctx.Manifest.IsStandaloneTransfer() { - ctx.lockVerifyState = verifyStateDisabled - return ctx - } - - ourLocks, theirLocks, verifyState := verifyLocks(remote) - ctx.lockVerifyState = verifyState - for _, l := range theirLocks { - ctx.theirLocks[l.Path] = l - } - for _, l := range ourLocks { - ctx.ourLocks[l.Path] = l - } - return ctx } -func verifyLocks(remote string) (ours, theirs []locking.Lock, st verifyState) { - endpoint := getAPIClient().Endpoints.Endpoint("upload", remote) - state := getVerifyStateFor(endpoint) - if state == verifyStateDisabled { - return - } - - lockClient := newLockClient(remote) - - ours, theirs, err := lockClient.VerifiableLocks(0) - if err != nil { - if errors.IsNotImplementedError(err) { - disableFor(endpoint) - } else if state == verifyStateUnknown || state == verifyStateEnabled { - if errors.IsAuthError(err) { - if state == verifyStateUnknown { - Error("WARNING: Authentication error: %s", err) - } else if state == verifyStateEnabled { - Exit("ERROR: Authentication error: %s", err) - } - } else { - Print("Remote %q does not support the LFS locking API. Consider disabling it with:", remote) - Print(" $ git config lfs.%s.locksverify false", endpoint.Url) - if state == verifyStateEnabled { - ExitWithError(err) - } - } - } - } else if state == verifyStateUnknown { - Print("Locking support detected on remote %q. Consider enabling it with:", remote) - Print(" $ git config lfs.%s.locksverify true", endpoint.Url) - } - - return ours, theirs, state -} - func (c *uploadContext) scannerError() error { c.errMu.Lock() defer c.errMu.Unlock() @@ -191,15 +117,8 @@ func (c *uploadContext) buildGitScanner() (*lfs.GitScanner, error) { } }) - gitscanner.FoundLockable = func(name string) { - if lock, ok := c.theirLocks[name]; ok { - c.trackedLocksMu.Lock() - c.unownedLocks = append(c.unownedLocks, lock) - c.trackedLocksMu.Unlock() - } - } - - gitscanner.PotentialLockables = &gitScannerLockables{m: c.theirLocks} + gitscanner.FoundLockable = func(n string) { c.lockVerifier.LockedByThem(n) } + gitscanner.PotentialLockables = c.lockVerifier return gitscanner, gitscanner.RemoteForPush(c.Remote) } @@ -239,11 +158,7 @@ func (c *uploadContext) prepareUpload(unfiltered ...*lfs.WrappedPointer) (*tq.Tr // current committer. var canUpload bool = true - if lock, ok := c.theirLocks[p.Name]; ok { - c.trackedLocksMu.Lock() - c.unownedLocks = append(c.unownedLocks, lock) - c.trackedLocksMu.Unlock() - + if c.lockVerifier.LockedByThem(p.Name) { // If the verification state is enabled, this failed // locks verification means that the push should fail. // @@ -252,14 +167,10 @@ func (c *uploadContext) prepareUpload(unfiltered ...*lfs.WrappedPointer) (*tq.Tr // // If the state is undefined, the verification error is // sent as a warning and the user can upload. 
- canUpload = c.lockVerifyState != verifyStateEnabled + canUpload = !c.lockVerifier.Enabled() } - if lock, ok := c.ourLocks[p.Name]; ok { - c.trackedLocksMu.Lock() - c.ownedLocks = append(c.ownedLocks, lock) - c.trackedLocksMu.Unlock() - } + c.lockVerifier.LockedByUs(p.Name) if canUpload { // estimate in meter early (even if it's not going into @@ -348,25 +259,23 @@ func (c *uploadContext) Await() { os.Exit(2) } - c.trackedLocksMu.Lock() - if ul := len(c.unownedLocks); ul > 0 { - Print("Unable to push %d locked file(s):", ul) - for _, unowned := range c.unownedLocks { - Print("* %s - %s", unowned.Path, unowned.Owner) + if c.lockVerifier.HasUnownedLocks() { + Print("Unable to push locked files:") + for _, unowned := range c.lockVerifier.UnownedLocks() { + Print("* %s - %s", unowned.Path(), unowned.Owners()) } - if c.lockVerifyState == verifyStateEnabled { + if c.lockVerifier.Enabled() { Exit("ERROR: Cannot update locked files.") } else { Error("WARNING: The above files would have halted this push.") } - } else if len(c.ownedLocks) > 0 { - Print("Consider unlocking your own locked file(s): (`git lfs unlock `)") - for _, owned := range c.ownedLocks { - Print("* %s", owned.Path) + } else if c.lockVerifier.HasOwnedLocks() { + Print("Consider unlocking your own locked files: (`git lfs unlock `)") + for _, owned := range c.lockVerifier.OwnedLocks() { + Print("* %s", owned.Path()) } } - c.trackedLocksMu.Unlock() } var ( @@ -440,33 +349,13 @@ func (c *uploadContext) ensureFile(smudgePath, cleanPath string) error { return nil } -// getVerifyStateFor returns whether or not lock verification is enabled for the -// given "endpoint". If no state has been explicitly set, an "unknown" state -// will be returned instead. -func getVerifyStateFor(endpoint lfsapi.Endpoint) verifyState { - uc := config.NewURLConfig(cfg.Git) - - v, ok := uc.Get("lfs", endpoint.Url, "locksverify") - if !ok { - if supportsLockingAPI(endpoint) { - return verifyStateEnabled - } - return verifyStateUnknown - } - - if enabled, _ := strconv.ParseBool(v); enabled { - return verifyStateEnabled - } - return verifyStateDisabled -} - -// supportsLockingAPI returns whether or not a given lfsapi.Endpoint "e" -// is known to support the LFS locking API by whether or not its hostname is -// included in the list above. -func supportsLockingAPI(e lfsapi.Endpoint) bool { - u, err := url.Parse(e.Url) +// supportsLockingAPI returns whether or not a given url is known to support +// the LFS locking API by whether or not its hostname is included in the list +// above. +func supportsLockingAPI(rawurl string) bool { + u, err := url.Parse(rawurl) if err != nil { - tracerx.Printf("commands: unable to parse %q to determine locking support: %v", e.Url, err) + tracerx.Printf("commands: unable to parse %q to determine locking support: %v", rawurl, err) return false } @@ -482,10 +371,10 @@ func supportsLockingAPI(e lfsapi.Endpoint) bool { // disableFor disables lock verification for the given lfsapi.Endpoint, // "endpoint". 
-func disableFor(endpoint lfsapi.Endpoint) error { - tracerx.Printf("commands: disabling lock verification for %q", endpoint.Url) +func disableFor(rawurl string) error { + tracerx.Printf("commands: disabling lock verification for %q", rawurl) - key := strings.Join([]string{"lfs", endpoint.Url, "locksverify"}, ".") + key := strings.Join([]string{"lfs", rawurl, "locksverify"}, ".") _, err := cfg.SetGitLocalKey(key, "false") return err diff --git a/commands/uploader_test.go b/commands/uploader_test.go index aae644e1..e5fd0637 100644 --- a/commands/uploader_test.go +++ b/commands/uploader_test.go @@ -3,7 +3,6 @@ package commands import ( "testing" - "github.com/git-lfs/git-lfs/lfsapi" "github.com/stretchr/testify/assert" ) @@ -13,11 +12,7 @@ type LockingSupportTestCase struct { } func (l *LockingSupportTestCase) Assert(t *testing.T) { - ep := lfsapi.Endpoint{ - Url: l.Given, - } - - assert.Equal(t, l.ExpectedToMatch, supportsLockingAPI(ep)) + assert.Equal(t, l.ExpectedToMatch, supportsLockingAPI(l.Given)) } func TestSupportedLockingHosts(t *testing.T) { diff --git a/config/config.go b/config/config.go index 4cb45197..089565b2 100644 --- a/config/config.go +++ b/config/config.go @@ -32,12 +32,15 @@ type Configuration struct { // configuration. Git Environment - CurrentRemote string + currentRemote *string + pushRemote *string // gitConfig can fetch or modify the current Git config and track the Git // version. gitConfig *git.Configuration + ref *git.Ref + remoteRef *git.Ref fs *fs.Filesystem gitDir *string workDir string @@ -54,9 +57,8 @@ func New() *Configuration { func NewIn(workdir, gitdir string) *Configuration { gitConf := git.NewConfig(workdir, gitdir) c := &Configuration{ - CurrentRemote: defaultRemote, - Os: EnvironmentOf(NewOsFetcher()), - gitConfig: gitConf, + Os: EnvironmentOf(NewOsFetcher()), + gitConfig: gitConf, } if len(gitConf.WorkDir) > 0 { @@ -106,9 +108,8 @@ type Values struct { // This method should only be used during testing. func NewFrom(v Values) *Configuration { c := &Configuration{ - CurrentRemote: defaultRemote, - Os: EnvironmentOf(mapFetcher(v.Os)), - gitConfig: git.NewConfig("", ""), + Os: EnvironmentOf(mapFetcher(v.Os)), + gitConfig: git.NewConfig("", ""), } c.Git = &delayedEnvironment{ callback: func() Environment { @@ -151,6 +152,90 @@ func (c *Configuration) FetchExcludePaths() []string { return tools.CleanPaths(patterns, ",") } +func (c *Configuration) CurrentRef() *git.Ref { + c.loading.Lock() + defer c.loading.Unlock() + if c.ref == nil { + r, err := git.CurrentRef() + if err != nil { + tracerx.Printf("Error loading current ref: %s", err) + c.ref = &git.Ref{} + } else { + c.ref = r + } + } + return c.ref +} + +func (c *Configuration) IsDefaultRemote() bool { + return c.Remote() == defaultRemote +} + +// Remote returns the default remote based on: +// 1. The currently tracked remote branch, if present +// 2. Any other SINGLE remote defined in .git/config +// 3. Use "origin" as a fallback. +// Results are cached after the first hit. 
+func (c *Configuration) Remote() string { + ref := c.CurrentRef() + + c.loading.Lock() + defer c.loading.Unlock() + + if c.currentRemote == nil { + if len(ref.Name) == 0 { + c.currentRemote = &defaultRemote + return defaultRemote + } + + if remote, ok := c.Git.Get(fmt.Sprintf("branch.%s.remote", ref.Name)); ok { + // try tracking remote + c.currentRemote = &remote + } else if remotes := c.Remotes(); len(remotes) == 1 { + // use only remote if there is only 1 + c.currentRemote = &remotes[0] + } else { + // fall back to default :( + c.currentRemote = &defaultRemote + } + } + return *c.currentRemote +} + +func (c *Configuration) PushRemote() string { + ref := c.CurrentRef() + c.loading.Lock() + defer c.loading.Unlock() + + if c.pushRemote == nil { + if remote, ok := c.Git.Get(fmt.Sprintf("branch.%s.pushRemote", ref.Name)); ok { + c.pushRemote = &remote + } else if remote, ok := c.Git.Get("remote.pushDefault"); ok { + c.pushRemote = &remote + } else { + c.loading.Unlock() + remote := c.Remote() + c.loading.Lock() + + c.pushRemote = &remote + } + } + + return *c.pushRemote +} + +func (c *Configuration) SetValidRemote(name string) error { + if err := git.ValidateRemote(name); err != nil { + return err + } + c.SetRemote(name) + return nil +} + +func (c *Configuration) SetRemote(name string) { + c.currentRemote = &name +} + func (c *Configuration) Remotes() []string { c.loadGitConfig() return c.remotes diff --git a/config/config_test.go b/config/config_test.go index c45083ba..66464f0f 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -3,9 +3,62 @@ package config import ( "testing" + "github.com/git-lfs/git-lfs/git" "github.com/stretchr/testify/assert" ) +func TestRemoteDefault(t *testing.T) { + cfg := NewFrom(Values{ + Git: map[string][]string{ + "branch.unused.remote": []string{"a"}, + "branch.unused.pushRemote": []string{"b"}, + }, + }) + assert.Equal(t, "origin", cfg.Remote()) + assert.Equal(t, "origin", cfg.PushRemote()) +} + +func TestRemoteBranchConfig(t *testing.T) { + cfg := NewFrom(Values{ + Git: map[string][]string{ + "branch.master.remote": []string{"a"}, + "branch.other.pushRemote": []string{"b"}, + }, + }) + cfg.ref = &git.Ref{Name: "master"} + + assert.Equal(t, "a", cfg.Remote()) + assert.Equal(t, "a", cfg.PushRemote()) +} + +func TestRemotePushDefault(t *testing.T) { + cfg := NewFrom(Values{ + Git: map[string][]string{ + "branch.master.remote": []string{"a"}, + "remote.pushDefault": []string{"b"}, + "branch.other.pushRemote": []string{"c"}, + }, + }) + cfg.ref = &git.Ref{Name: "master"} + + assert.Equal(t, "a", cfg.Remote()) + assert.Equal(t, "b", cfg.PushRemote()) +} + +func TestRemoteBranchPushDefault(t *testing.T) { + cfg := NewFrom(Values{ + Git: map[string][]string{ + "branch.master.remote": []string{"a"}, + "remote.pushDefault": []string{"b"}, + "branch.master.pushRemote": []string{"c"}, + }, + }) + cfg.ref = &git.Ref{Name: "master"} + + assert.Equal(t, "a", cfg.Remote()) + assert.Equal(t, "c", cfg.PushRemote()) +} + func TestBasicTransfersOnlySetValue(t *testing.T) { cfg := NewFrom(Values{ Git: map[string][]string{ diff --git a/config/environment.go b/config/environment.go index 172bcf92..73b8e513 100644 --- a/config/environment.go +++ b/config/environment.go @@ -65,13 +65,58 @@ func (e *environment) GetAll(key string) []string { return e.Fetcher.GetAll(key) } -func (e *environment) Bool(key string, def bool) (val bool) { +func (e *environment) Bool(key string, def bool) bool { s, _ := e.Fetcher.Get(key) - if len(s) == 0 { + return Bool(s, def) +} + +func (e 
*environment) Int(key string, def int) int { + s, _ := e.Fetcher.Get(key) + return Int(s, def) +} + +func (e *environment) All() map[string][]string { + return e.Fetcher.All() +} + +// Int returns the int value associated with the given value, or the value +// "def", if the value is blank. +// +// To convert from a the string value attached to a given key, +// `strconv.Atoi(val)` is called. If `Atoi` returned a non-nil error, +// then the value "def" will be returned instead. +// +// Otherwise, if the value was converted `string -> int` successfully, +// then it will be returned wholesale. +func Int(value string, def int) int { + if len(value) == 0 { return def } - switch strings.ToLower(s) { + i, err := strconv.Atoi(value) + if err != nil { + return def + } + + return i +} + +// Bool returns the boolean state associated with the given value, or the +// value "def", if the value is blank. +// +// The "boolean state associated with a given key" is defined as the +// case-insensitive string comparison with the following: +// +// 1) true if... +// "true", "1", "on", "yes", or "t" +// 2) false if... +// "false", "0", "off", "no", "f", or otherwise. +func Bool(value string, def bool) bool { + if len(value) == 0 { + return def + } + + switch strings.ToLower(value) { case "true", "1", "on", "yes", "t": return true case "false", "0", "off", "no", "f": @@ -80,21 +125,3 @@ func (e *environment) Bool(key string, def bool) (val bool) { return false } } - -func (e *environment) Int(key string, def int) (val int) { - s, _ := e.Fetcher.Get(key) - if len(s) == 0 { - return def - } - - i, err := strconv.Atoi(s) - if err != nil { - return def - } - - return i -} - -func (e *environment) All() map[string][]string { - return e.Fetcher.All() -} diff --git a/config/url_config.go b/config/url_config.go index 835ac8cb..21663a79 100644 --- a/config/url_config.go +++ b/config/url_config.go @@ -50,6 +50,11 @@ func (c *URLConfig) GetAll(prefix, rawurl, key string) []string { return c.git.GetAll(strings.Join([]string{prefix, key}, ".")) } +func (c *URLConfig) Bool(prefix, rawurl, key string, def bool) bool { + s, _ := c.Get(prefix, rawurl, key) + return Bool(s, def) +} + func (c *URLConfig) getAll(prefix, rawurl, key string) []string { hosts, paths := c.hostsAndPaths(rawurl) diff --git a/docs/man/git-lfs-ls-files.1.ronn b/docs/man/git-lfs-ls-files.1.ronn index 3d64d46a..fe4fa746 100644 --- a/docs/man/git-lfs-ls-files.1.ronn +++ b/docs/man/git-lfs-ls-files.1.ronn @@ -9,6 +9,7 @@ git-lfs-ls-files(1) -- Show information about Git LFS files in the index and wor Display paths of Git LFS files that are found in the tree at the given reference. If no reference is given, scan the currently checked-out branch. +An asterisk (*) after the OID indicates a LFS pointer, a minus (-) a full object. ## OPTIONS diff --git a/docs/man/git-lfs-migrate.1.ronn b/docs/man/git-lfs-migrate.1.ronn index 82e37b05..1d7ca703 100644 --- a/docs/man/git-lfs-migrate.1.ronn +++ b/docs/man/git-lfs-migrate.1.ronn @@ -27,6 +27,11 @@ git-lfs-migrate(1) - Migrate history to or from git-lfs * `--exclude-ref`=: See [INCLUDE AND EXCLUDE (REFS)]. +* `--skip-fetch`: + Assumes that the known set of remote references is complete, and should not + be refreshed when determining the set of "un-pushed" commits to migrate. Has + no effect when combined with `--include-ref` or `--exclude-ref`. + * `--everything`: See [INCLUDE AND EXCLUDE (REFS)]. 
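The extracted `config.Bool` and `config.Int` helpers introduced in `config/environment.go` above document their parsing rules in their comments. The following table-driven test is a sketch of how those rules could be exercised; it is illustrative only and not part of this diff (the test name and cases are mine), but the `config.Bool(value string, def bool)` and `config.Int(value string, def int)` signatures are the ones shown in the hunk.

```go
package config_test

import (
	"testing"

	"github.com/git-lfs/git-lfs/config"
	"github.com/stretchr/testify/assert"
)

// Illustrative sketch (not part of this change): blank values fall back to
// the supplied default, truthy/falsey strings are matched case-insensitively,
// and values that strconv.Atoi cannot parse fall back to the default too.
func TestBoolAndIntParsingSketch(t *testing.T) {
	// recognized truthy spellings
	for _, v := range []string{"true", "1", "on", "yes", "t", "TRUE"} {
		assert.True(t, config.Bool(v, false), "value=%q", v)
	}

	// recognized falsey spellings, plus anything unrecognized
	for _, v := range []string{"false", "0", "off", "no", "f", "bogus"} {
		assert.False(t, config.Bool(v, true), "value=%q", v)
	}

	// blank values use the supplied default
	assert.True(t, config.Bool("", true))
	assert.Equal(t, 42, config.Int("", 42))

	// non-numeric values also fall back to the default
	assert.Equal(t, 42, config.Int("not-a-number", 42))
	assert.Equal(t, 7, config.Int("7", 42))
}
```

The blank-input fallback to `def` matches the behaviour of the previous method-based `Bool`/`Int` implementations that this hunk extracts into package-level functions.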
diff --git a/docs/man/git-lfs-uninstall.1.ronn b/docs/man/git-lfs-uninstall.1.ronn index c4a7fb1f..b6e5de09 100644 --- a/docs/man/git-lfs-uninstall.1.ronn +++ b/docs/man/git-lfs-uninstall.1.ronn @@ -12,6 +12,12 @@ Perform the following actions to remove the Git LFS configuration: * Remove the "lfs" clean and smudge filters from the global Git config. * Uninstall the Git LFS pre-push hook if run from inside a Git repository. +## OPTIONS + +* --local: + Removes the "lfs" smudge and clean filters from the local repository's git + config, instead of the global git config (~/.gitconfig). + ## SEE ALSO git-lfs-install(1). diff --git a/docs/man/git-lfs.1.ronn b/docs/man/git-lfs.1.ronn index 6d61de99..74a7dd1a 100644 --- a/docs/man/git-lfs.1.ronn +++ b/docs/man/git-lfs.1.ronn @@ -59,6 +59,8 @@ commands and low level ("plumbing") commands. Show the status of Git LFS files in the working tree. * git-lfs-track(1): View or add Git LFS paths to Git attributes. +* git-lfs-uninstall(1): + Uninstall Git LFS by removing hooks and smudge/clean filter configuration. * git-lfs-unlock(1): Remove "locked" setting for a file on the Git LFS server. * git-lfs-untrack(1): diff --git a/git/attribs.go b/git/attribs.go index 44419328..067bf42d 100644 --- a/git/attribs.go +++ b/git/attribs.go @@ -36,7 +36,7 @@ func (s *AttributeSource) String() string { // GetAttributePaths returns a list of entries in .gitattributes which are // configured with the filter=lfs attribute -// workingDIr is the root of the working copy +// workingDir is the root of the working copy // gitDir is the root of the git repo func GetAttributePaths(workingDir, gitDir string) []AttributePath { paths := make([]AttributePath, 0) @@ -56,7 +56,11 @@ func GetAttributePaths(workingDir, gitDir string) []AttributePath { scanner.Split(le.ScanLines) for scanner.Scan() { - line := scanner.Text() + line := strings.TrimSpace(scanner.Text()) + + if strings.HasPrefix(line, "#") { + continue + } // Check for filter=lfs (signifying that LFS is tracking // this file) or "lockable", which indicates that the diff --git a/git/filter_process_scanner.go b/git/filter_process_scanner.go index f56059a3..8af20c7c 100644 --- a/git/filter_process_scanner.go +++ b/git/filter_process_scanner.go @@ -172,8 +172,6 @@ func (o *FilterProcessScanner) Err() error { return o.err } // will read the body of the request. Since the body is _not_ offset, one // request should be read in its entirety before consuming the next request. 
func (o *FilterProcessScanner) readRequest() (*Request, error) { - tracerx.Printf("Read filter-process request.") - requestList, err := o.pl.readPacketList() if err != nil { return nil, err diff --git a/git/git.go b/git/git.go index 983a0834..bd5b931f 100644 --- a/git/git.go +++ b/git/git.go @@ -54,15 +54,36 @@ func (t RefType) Prefix() (string, bool) { return "refs/tags", true case RefTypeRemoteTag: return "refs/remotes/tags", true - case RefTypeHEAD: - return "", false - case RefTypeOther: - return "", false default: - panic(fmt.Sprintf("git: unknown RefType %d", t)) + return "", false } } +func ParseRef(absRef, sha string) *Ref { + r := &Ref{Sha: sha} + if strings.HasPrefix(absRef, "refs/heads/") { + r.Name = absRef[11:] + r.Type = RefTypeLocalBranch + } else if strings.HasPrefix(absRef, "refs/tags/") { + r.Name = absRef[10:] + r.Type = RefTypeLocalTag + } else if strings.HasPrefix(absRef, "refs/remotes/tags/") { + r.Name = absRef[18:] + r.Type = RefTypeRemoteTag + } else if strings.HasPrefix(absRef, "refs/remotes/") { + r.Name = absRef[13:] + r.Type = RefTypeRemoteBranch + } else { + r.Name = absRef + if absRef == "HEAD" { + r.Type = RefTypeHEAD + } else { + r.Type = RefTypeOther + } + } + return r +} + // A git reference (branch, tag etc) type Ref struct { Name string @@ -70,6 +91,24 @@ type Ref struct { Sha string } +// Refspec returns the fully-qualified reference name (including remote), i.e., +// for a remote branch called 'my-feature' on remote 'origin', this function +// will return: +// +// refs/remotes/origin/my-feature +func (r *Ref) Refspec() string { + if r == nil { + return "" + } + + prefix, ok := r.Type.Prefix() + if ok { + return fmt.Sprintf("%s/%s", prefix, r.Name) + } + + return r.Name +} + // Some top level information about a commit (only first line of message) type CommitSummary struct { Sha string @@ -242,19 +281,6 @@ func (c *Configuration) CurrentRemoteRef() (*Ref, error) { return ResolveRef(remoteref) } -// RemoteForCurrentBranch returns the name of the remote that the current branch is tracking -func (c *Configuration) RemoteForCurrentBranch() (string, error) { - ref, err := CurrentRef() - if err != nil { - return "", err - } - remote := c.RemoteForBranch(ref.Name) - if remote == "" { - return "", fmt.Errorf("remote not found for branch %q", ref.Name) - } - return remote, nil -} - // RemoteRefForCurrentBranch returns the full remote ref (refs/remotes/{remote}/{remotebranch}) // that the current branch is tracking. func (c *Configuration) RemoteRefNameForCurrentBranch() (string, error) { @@ -361,14 +387,7 @@ func UpdateRef(ref *Ref, to []byte, reason string) error { // reflog entry, if a "reason" was provided). It operates within the given // working directory "wd". It returns an error if any were encountered. func UpdateRefIn(wd string, ref *Ref, to []byte, reason string) error { - var refspec string - if prefix, ok := ref.Type.Prefix(); ok { - refspec = fmt.Sprintf("%s/%s", prefix, ref.Name) - } else { - refspec = ref.Name - } - - args := []string{"update-ref", refspec, hex.EncodeToString(to)} + args := []string{"update-ref", ref.Refspec(), hex.EncodeToString(to)} if len(reason) > 0 { args = append(args, "-m", reason) } @@ -423,39 +442,6 @@ func ValidateRemoteURL(remote string) error { } } -// DefaultRemote returns the default remote based on: -// 1. The currently tracked remote branch, if present -// 2. "origin", if defined -// 3. Any other SINGLE remote defined in .git/config -// Returns an error if all of these fail, i.e. 
no tracked remote branch, no -// "origin", and either no remotes defined or 2+ non-"origin" remotes -func (c *Configuration) DefaultRemote() (string, error) { - tracked, err := c.RemoteForCurrentBranch() - if err == nil { - return tracked, nil - } - - // Otherwise, check what remotes are defined - remotes, err := RemoteList() - if err != nil { - return "", err - } - switch len(remotes) { - case 0: - return "", errors.New("No remotes defined") - case 1: // always use a single remote whether it's origin or otherwise - return remotes[0], nil - default: - for _, remote := range remotes { - // Use origin if present - if remote == "origin" { - return remote, nil - } - } - } - return "", errors.New("Unable to pick default remote, too ambiguous") -} - func UpdateIndexFromStdin() *subprocess.Cmd { return git("update-index", "-q", "--refresh", "--stdin") } diff --git a/git/git_test.go b/git/git_test.go index e2e9a8d3..afd83b82 100644 --- a/git/git_test.go +++ b/git/git_test.go @@ -13,6 +13,70 @@ import ( "github.com/stretchr/testify/assert" ) +func TestRefString(t *testing.T) { + const sha = "0000000000000000000000000000000000000000" + for s, r := range map[string]*Ref{ + "refs/heads/master": &Ref{ + Name: "master", + Type: RefTypeLocalBranch, + Sha: sha, + }, + "refs/remotes/origin/master": &Ref{ + Name: "origin/master", + Type: RefTypeRemoteBranch, + Sha: sha, + }, + "refs/remotes/tags/v1.0.0": &Ref{ + Name: "v1.0.0", + Type: RefTypeRemoteTag, + Sha: sha, + }, + "refs/tags/v1.0.0": &Ref{ + Name: "v1.0.0", + Type: RefTypeLocalTag, + Sha: sha, + }, + "HEAD": &Ref{ + Name: "HEAD", + Type: RefTypeHEAD, + Sha: sha, + }, + "other": &Ref{ + Name: "other", + Type: RefTypeOther, + Sha: sha, + }, + } { + assert.Equal(t, s, r.Refspec()) + } +} + +func TestParseRefs(t *testing.T) { + tests := map[string]RefType{ + "refs/heads": RefTypeLocalBranch, + "refs/tags": RefTypeLocalTag, + "refs/remotes/tags": RefTypeRemoteTag, + "refs/remotes": RefTypeRemoteBranch, + } + + for prefix, expectedType := range tests { + r := ParseRef(prefix+"/branch", "abc123") + assert.Equal(t, "abc123", r.Sha, "prefix: "+prefix) + assert.Equal(t, "branch", r.Name, "prefix: "+prefix) + assert.Equal(t, expectedType, r.Type, "prefix: "+prefix) + } + + r := ParseRef("refs/foo/branch", "abc123") + assert.Equal(t, "abc123", r.Sha, "prefix: refs/foo") + assert.Equal(t, "refs/foo/branch", r.Name, "prefix: refs/foo") + assert.Equal(t, RefTypeOther, r.Type, "prefix: refs/foo") + + r = ParseRef("HEAD", "abc123") + assert.Equal(t, "abc123", r.Sha, "prefix: HEAD") + assert.Equal(t, "HEAD", r.Name, "prefix: HEAD") + assert.Equal(t, RefTypeHEAD, r.Type, "prefix: HEAD") +} + func TestCurrentRefAndCurrentRemoteRef(t *testing.T) { repo := test.NewRepo(t) repo.Pushd() @@ -71,10 +135,6 @@ func TestCurrentRefAndCurrentRemoteRef(t *testing.T) { assert.Nil(t, err) assert.Equal(t, "refs/remotes/origin/someremotebranch", refname) - remote, err := gitConf.RemoteForCurrentBranch() - assert.Nil(t, err) - assert.Equal(t, "origin", remote) - ref, err = ResolveRef(outputs[2].Sha) assert.Nil(t, err) assert.Equal(t, &Ref{outputs[2].Sha, RefTypeOther, outputs[2].Sha}, ref) @@ -563,16 +623,3 @@ func TestRefTypeKnownPrefixes(t *testing.T) { assert.Equal(t, expected.Ok, ok) } } - -func TestRefTypeUnknownPrefix(t *testing.T) { - defer func() { - if err := recover(); err != nil { - assert.Equal(t, "git: unknown RefType -1", err) - } else { - t.Fatal("git: expected panic() from RefType.Prefix()") - } - }() - - unknown := RefType(-1) - unknown.Prefix() -} diff --git 
a/git/rev_list_scanner.go b/git/rev_list_scanner.go index 294dbe5a..58f28457 100644 --- a/git/rev_list_scanner.go +++ b/git/rev_list_scanner.go @@ -225,7 +225,7 @@ func NewRevListScanner(include, excluded []string, opt *ScanRefsOptions) (*RevLi // occurred. func revListArgs(include, exclude []string, opt *ScanRefsOptions) (io.Reader, []string, error) { var stdin io.Reader - args := []string{"rev-list"} + args := []string{"rev-list", "--stdin"} if !opt.CommitsOnly { args = append(args, "--objects") } @@ -246,15 +246,16 @@ func revListArgs(include, exclude []string, opt *ScanRefsOptions) (io.Reader, [] args = append(args, "--do-walk") } - args = append(args, includeExcludeShas(include, exclude)...) + stdin = strings.NewReader(strings.Join( + includeExcludeShas(include, exclude), "\n")) case ScanAllMode: args = append(args, "--all") case ScanLeftToRemoteMode: if len(opt.SkippedRefs) == 0 { - args = append(args, includeExcludeShas(include, exclude)...) args = append(args, "--not", "--remotes="+opt.Remote) + stdin = strings.NewReader(strings.Join( + includeExcludeShas(include, exclude), "\n")) } else { - args = append(args, "--stdin") stdin = strings.NewReader(strings.Join( append(includeExcludeShas(include, exclude), opt.SkippedRefs...), "\n"), ) diff --git a/git/rev_list_scanner_test.go b/git/rev_list_scanner_test.go index ab29417c..70bd9985 100644 --- a/git/rev_list_scanner_test.go +++ b/git/rev_list_scanner_test.go @@ -4,13 +4,13 @@ import ( "bufio" "encoding/hex" "errors" + "fmt" "io/ioutil" "strings" "sync/atomic" "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) type ArgsTestCase struct { @@ -32,11 +32,7 @@ func (c *ArgsTestCase) Assert(t *testing.T) { assert.Nil(t, err) } - require.Equal(t, len(c.ExpectedArgs), len(args)) - for i := 0; i < len(c.ExpectedArgs); i++ { - assert.Equal(t, c.ExpectedArgs[i], args[i], - "element #%d not equal: wanted %q, got %q", i, c.ExpectedArgs[i], args[i]) - } + assert.EqualValues(t, c.ExpectedArgs, args) if stdin != nil { b, err := ioutil.ReadAll(stdin) @@ -60,34 +56,38 @@ func TestRevListArgs(t *testing.T) { Mode: ScanRefsMode, SkipDeletedBlobs: false, }, - ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--do-walk", "--"}, }, "scan refs not deleted, left and right": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, SkipDeletedBlobs: true, }, - ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--no-walk", "--"}, }, "scan refs deleted, left only": { Include: []string{s1}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, SkipDeletedBlobs: false, }, - ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", s1, "--"}, + ExpectedStdin: s1, + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--do-walk", "--"}, }, "scan refs not deleted, left only": { Include: []string{s1}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, SkipDeletedBlobs: true, }, - ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", s1, "--"}, + ExpectedStdin: s1, + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--no-walk", "--"}, }, "scan all": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanAllMode, }, - ExpectedArgs: []string{"rev-list", "--objects", "--all", "--"}, + 
ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--all", "--"}, }, "scan left to remote, no skipped refs": { Include: []string{s1}, Opt: &ScanRefsOptions{ @@ -95,7 +95,8 @@ func TestRevListArgs(t *testing.T) { Remote: "origin", SkippedRefs: []string{}, }, - ExpectedArgs: []string{"rev-list", "--objects", s1, "--not", "--remotes=origin", "--"}, + ExpectedStdin: s1, + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--not", "--remotes=origin", "--"}, }, "scan left to remote, skipped refs": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ @@ -103,7 +104,7 @@ func TestRevListArgs(t *testing.T) { Remote: "origin", SkippedRefs: []string{"a", "b", "c"}, }, - ExpectedArgs: []string{"rev-list", "--objects", "--stdin", "--"}, + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--"}, ExpectedStdin: s1 + "\n^" + s2 + "\na\nb\nc", }, "scan unknown type": { @@ -117,35 +118,40 @@ func TestRevListArgs(t *testing.T) { Mode: ScanRefsMode, Order: DateRevListOrder, }, - ExpectedArgs: []string{"rev-list", "--objects", "--date-order", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--date-order", "--do-walk", "--"}, }, "scan author date order": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, Order: AuthorDateRevListOrder, }, - ExpectedArgs: []string{"rev-list", "--objects", "--author-date-order", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--author-date-order", "--do-walk", "--"}, }, "scan topo order": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, Order: TopoRevListOrder, }, - ExpectedArgs: []string{"rev-list", "--objects", "--topo-order", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--topo-order", "--do-walk", "--"}, }, "scan commits only": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, CommitsOnly: true, }, - ExpectedArgs: []string{"rev-list", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--do-walk", "--"}, }, "scan reverse": { Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{ Mode: ScanRefsMode, Reverse: true, }, - ExpectedArgs: []string{"rev-list", "--objects", "--reverse", "--do-walk", s1, "^" + s2, "--"}, + ExpectedStdin: fmt.Sprintf("%s\n^%s", s1, s2), + ExpectedArgs: []string{"rev-list", "--stdin", "--objects", "--reverse", "--do-walk", "--"}, }, } { t.Run(desc, c.Assert) diff --git a/lfs/gitfilter_smudge.go b/lfs/gitfilter_smudge.go index bb2c4cde..e3c82f61 100644 --- a/lfs/gitfilter_smudge.go +++ b/lfs/gitfilter_smudge.go @@ -81,7 +81,8 @@ func (f *GitFilter) downloadFile(writer io.Writer, ptr *Pointer, workingfile, me // // Either way, forward it into the *tq.TransferQueue so that updates are // sent over correctly. 
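The `revListArgs` change above (and the updated expectations in these tests) moves the include/exclude revisions off the command line and onto the standard input of `git rev-list --stdin`. A rough sketch of the resulting invocation, using plain `os/exec` instead of the package's subprocess wrapper and placeholder refs:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Placeholder revisions: include one tip, exclude another.
	include := "refs/heads/master"
	exclude := "refs/remotes/origin/master"

	// Revisions go to stdin ("--stdin") rather than argv, which sidesteps
	// OS argument-length limits when the exclude set grows large.
	cmd := exec.Command("git", "rev-list", "--stdin", "--objects", "--do-walk", "--")
	cmd.Stdin = strings.NewReader(include + "\n^" + exclude)

	out, err := cmd.CombinedOutput()
	if err != nil {
		fmt.Println("rev-list failed:", err)
	}
	fmt.Print(string(out))
}
```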
- q := tq.NewTransferQueue(tq.Download, manifest, "", tq.WithProgressCallback(cb)) + + q := tq.NewTransferQueue(tq.Download, manifest, f.cfg.Remote(), tq.WithProgressCallback(cb)) q.Add(filepath.Base(workingfile), mediafile, ptr.Oid, ptr.Size) q.Wait() diff --git a/lfs/lfs.go b/lfs/lfs.go index 05cc2a2a..eae67ca0 100644 --- a/lfs/lfs.go +++ b/lfs/lfs.go @@ -25,8 +25,8 @@ func Environ(cfg *config.Configuration, manifest *tq.Manifest) []string { panic(err.Error()) } - download := api.Endpoints.AccessFor(api.Endpoints.Endpoint("download", cfg.CurrentRemote).Url) - upload := api.Endpoints.AccessFor(api.Endpoints.Endpoint("upload", cfg.CurrentRemote).Url) + download := api.Endpoints.AccessFor(api.Endpoints.Endpoint("download", cfg.Remote()).Url) + upload := api.Endpoints.AccessFor(api.Endpoints.Endpoint("upload", cfg.PushRemote()).Url) dltransfers := manifest.GetDownloadAdapterNames() sort.Strings(dltransfers) diff --git a/lfsapi/auth.go b/lfsapi/auth.go index b64ff662..d4bcc4ad 100644 --- a/lfsapi/auth.go +++ b/lfsapi/auth.go @@ -20,23 +20,13 @@ var ( defaultEndpointFinder = NewEndpointFinder(nil) ) +// DoWithAuth sends an HTTP request to get an HTTP response. It attempts to add +// authentication from netrc or git's credential helpers if necessary, +// supporting basic and ntlm authentication. func (c *Client) DoWithAuth(remote string, req *http.Request) (*http.Response, error) { - credHelper := c.Credentials - if credHelper == nil { - credHelper = defaultCredentialHelper - } + req.Header = c.extraHeadersFor(req) - netrcFinder := c.Netrc - if netrcFinder == nil { - netrcFinder = defaultNetrcFinder - } - - ef := c.Endpoints - if ef == nil { - ef = defaultEndpointFinder - } - - apiEndpoint, access, creds, credsURL, err := getCreds(credHelper, netrcFinder, ef, remote, req) + apiEndpoint, access, credHelper, credsURL, creds, err := c.getCreds(remote, req) if err != nil { return nil, err } @@ -71,7 +61,7 @@ func (c *Client) doWithCreds(req *http.Request, credHelper CredentialHelper, cre if access == NTLMAccess { return c.doWithNTLM(req, credHelper, creds, credsURL) } - return c.Do(req) + return c.do(req) } // getCreds fills the authorization header for the given request if possible, @@ -94,32 +84,48 @@ func (c *Client) doWithCreds(req *http.Request, credHelper CredentialHelper, cre // 3. The Git Remote URL, which should be something like "https://git.com/repo.git" // This URL is used for the Git Credential Helper. This way existing https // Git remote credentials can be re-used for LFS. 
-func getCreds(credHelper CredentialHelper, netrcFinder NetrcFinder, ef EndpointFinder, remote string, req *http.Request) (Endpoint, Access, Creds, *url.URL, error) { +func (c *Client) getCreds(remote string, req *http.Request) (Endpoint, Access, CredentialHelper, *url.URL, Creds, error) { + ef := c.Endpoints + if ef == nil { + ef = defaultEndpointFinder + } + + netrcFinder := c.Netrc + if netrcFinder == nil { + netrcFinder = defaultNetrcFinder + } + operation := getReqOperation(req) apiEndpoint := ef.Endpoint(operation, remote) access := ef.AccessFor(apiEndpoint.Url) if access != NTLMAccess { if requestHasAuth(req) || setAuthFromNetrc(netrcFinder, req) || access == NoneAccess { - return apiEndpoint, access, nil, nil, nil + return apiEndpoint, access, nullCreds, nil, nil, nil } credsURL, err := getCredURLForAPI(ef, operation, remote, apiEndpoint, req) if err != nil { - return apiEndpoint, access, nil, nil, errors.Wrap(err, "creds") + return apiEndpoint, access, nullCreds, nil, nil, errors.Wrap(err, "creds") } if credsURL == nil { - return apiEndpoint, access, nil, nil, nil + return apiEndpoint, access, nullCreds, nil, nil, nil } - creds, err := fillGitCreds(credHelper, ef, req, credsURL) - return apiEndpoint, access, creds, credsURL, err + credHelper, creds, err := c.getGitCreds(ef, req, credsURL) + if err == nil { + tracerx.Printf("Filled credentials for %s", credsURL) + setRequestAuth(req, creds["username"], creds["password"]) + } + return apiEndpoint, access, credHelper, credsURL, creds, err } + // NTLM ONLY + credsURL, err := url.Parse(apiEndpoint.Url) if err != nil { - return apiEndpoint, access, nil, nil, errors.Wrap(err, "creds") + return apiEndpoint, access, nullCreds, nil, nil, errors.Wrap(err, "creds") } if netrcMachine := getAuthFromNetrc(netrcFinder, req); netrcMachine != nil { @@ -131,20 +137,16 @@ func getCreds(credHelper CredentialHelper, netrcFinder NetrcFinder, ef EndpointF "source": "netrc", } - return apiEndpoint, access, creds, credsURL, nil + return apiEndpoint, access, nullCreds, credsURL, creds, nil } - creds, err := getGitCreds(credHelper, ef, req, credsURL) - return apiEndpoint, access, creds, credsURL, err + // NTLM uses creds to create the session + credHelper, creds, err := c.getGitCreds(ef, req, credsURL) + return apiEndpoint, access, credHelper, credsURL, creds, err } -func getGitCreds(credHelper CredentialHelper, ef EndpointFinder, req *http.Request, u *url.URL) (Creds, error) { - path := strings.TrimPrefix(u.Path, "/") - input := Creds{"protocol": u.Scheme, "host": u.Host, "path": path} - if u.User != nil && u.User.Username() != "" { - input["username"] = u.User.Username() - } - +func (c *Client) getGitCreds(ef EndpointFinder, req *http.Request, u *url.URL) (CredentialHelper, Creds, error) { + credHelper, input := c.getCredentialHelper(u) creds, err := credHelper.Fill(input) if creds == nil || len(creds) < 1 { errmsg := fmt.Sprintf("Git credentials for %s not found", u) @@ -156,17 +158,7 @@ func getGitCreds(credHelper CredentialHelper, ef EndpointFinder, req *http.Reque err = errors.New(errmsg) } - return creds, err -} - -func fillGitCreds(credHelper CredentialHelper, ef EndpointFinder, req *http.Request, u *url.URL) (Creds, error) { - creds, err := getGitCreds(credHelper, ef, req, u) - if err == nil { - tracerx.Printf("Filled credentials for %s", u) - setRequestAuth(req, creds["username"], creds["password"]) - } - - return creds, err + return credHelper, creds, err } func getAuthFromNetrc(netrcFinder NetrcFinder, req *http.Request) *netrc.Machine { diff 
--git a/lfsapi/auth_test.go b/lfsapi/auth_test.go index daedcdc1..2a922115 100644 --- a/lfsapi/auth_test.go +++ b/lfsapi/auth_test.go @@ -11,6 +11,7 @@ import ( "testing" "github.com/git-lfs/git-lfs/errors" + "github.com/git-lfs/git-lfs/git" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -93,7 +94,6 @@ func TestDoWithAuthApprove(t *testing.T) { assert.True(t, creds.IsApproved(Creds(map[string]string{ "username": "user", "password": "pass", - "path": "repo/lfs", "protocol": "http", "host": srv.Listener.Addr().String(), }))) @@ -264,6 +264,51 @@ func TestGetCreds(t *testing.T) { "lfs.url": "https://git-server.com/repo/lfs", "lfs.https://git-server.com/repo/lfs.access": "basic", }, + Expected: getCredsExpected{ + Access: BasicAccess, + Endpoint: "https://git-server.com/repo/lfs", + Authorization: basicAuth("git-server.com", "monkey"), + CredsURL: "https://git-server.com/repo/lfs", + Creds: map[string]string{ + "protocol": "https", + "host": "git-server.com", + "username": "git-server.com", + "password": "monkey", + }, + }, + }, + "basic access with usehttppath": getCredsTest{ + Remote: "origin", + Method: "GET", + Href: "https://git-server.com/repo/lfs/locks", + Config: map[string]string{ + "lfs.url": "https://git-server.com/repo/lfs", + "lfs.https://git-server.com/repo/lfs.access": "basic", + "credential.usehttppath": "true", + }, + Expected: getCredsExpected{ + Access: BasicAccess, + Endpoint: "https://git-server.com/repo/lfs", + Authorization: basicAuth("git-server.com", "monkey"), + CredsURL: "https://git-server.com/repo/lfs", + Creds: map[string]string{ + "protocol": "https", + "host": "git-server.com", + "username": "git-server.com", + "password": "monkey", + "path": "repo/lfs", + }, + }, + }, + "basic access with url-specific usehttppath": getCredsTest{ + Remote: "origin", + Method: "GET", + Href: "https://git-server.com/repo/lfs/locks", + Config: map[string]string{ + "lfs.url": "https://git-server.com/repo/lfs", + "lfs.https://git-server.com/repo/lfs.access": "basic", + "credential.https://git-server.com.usehttppath": "true", + }, Expected: getCredsExpected{ Access: BasicAccess, Endpoint: "https://git-server.com/repo/lfs", @@ -295,7 +340,6 @@ func TestGetCreds(t *testing.T) { "host": "git-server.com", "username": "git-server.com", "password": "monkey", - "path": "repo/lfs", }, }, }, @@ -369,7 +413,6 @@ func TestGetCreds(t *testing.T) { "host": "git-server.com", "username": "user", "password": "monkey", - "path": "repo/lfs", }, }, }, @@ -392,7 +435,6 @@ func TestGetCreds(t *testing.T) { "host": "git-server.com", "username": "git-server.com", "password": "monkey", - "path": "repo", }, }, }, @@ -443,7 +485,6 @@ func TestGetCreds(t *testing.T) { "host": "git-server.com", "username": "git-server.com", "password": "monkey", - "path": "repo/lfs/locks", }, }, }, @@ -465,7 +506,6 @@ func TestGetCreds(t *testing.T) { "host": "lfs-server.com", "username": "lfs-server.com", "password": "monkey", - "path": "repo/lfs/locks", }, }, }, @@ -487,7 +527,6 @@ func TestGetCreds(t *testing.T) { "host": "git-server.com:8080", "username": "git-server.com:8080", "password": "monkey", - "path": "repo/lfs/locks", }, }, }, @@ -509,7 +548,6 @@ func TestGetCreds(t *testing.T) { Creds: map[string]string{ "host": "git-server.com", "password": "monkey", - "path": "repo/lfs", "protocol": "https", "username": "git-server.com", }, @@ -517,8 +555,6 @@ func TestGetCreds(t *testing.T) { }, } - credHelper := &fakeCredentialFiller{} - netrcFinder := &fakeNetrc{} for desc, test := range tests 
{ t.Log(desc) req, err := http.NewRequest(test.Method, test.Href, nil) @@ -531,8 +567,12 @@ func TestGetCreds(t *testing.T) { req.Header.Set(key, value) } - ef := NewEndpointFinder(NewContext(nil, nil, test.Config)) - endpoint, access, creds, credsURL, err := getCreds(credHelper, netrcFinder, ef, test.Remote, req) + ctx := NewContext(git.NewConfig("", ""), nil, test.Config) + client, _ := NewClient(ctx) + client.Credentials = &fakeCredentialFiller{} + client.Netrc = &fakeNetrc{} + client.Endpoints = NewEndpointFinder(ctx) + endpoint, access, _, credsURL, creds, err := client.getCreds(test.Remote, req) if !assert.Nil(t, err) { continue } diff --git a/lfsapi/client.go b/lfsapi/client.go index 00968f92..ce20c3ab 100644 --- a/lfsapi/client.go +++ b/lfsapi/client.go @@ -79,8 +79,18 @@ func joinURL(prefix, suffix string) string { return prefix + slash + suffix } +// Do sends an HTTP request to get an HTTP response. It wraps net/http, adding +// extra headers, redirection handling, and error reporting. func (c *Client) Do(req *http.Request) (*http.Response, error) { req.Header = c.extraHeadersFor(req) + + return c.do(req) +} + +// do performs an *http.Request respecting redirects, and handles the response +// as defined in c.handleResponse. Notably, it does not alter the headers for +// the request argument in any way. +func (c *Client) do(req *http.Request) (*http.Response, error) { req.Header.Set("User-Agent", UserAgent) res, err := c.doWithRedirects(c.httpClient(req.Host), req, nil) diff --git a/lfsapi/creds.go b/lfsapi/creds.go index 187043b1..4c113b49 100644 --- a/lfsapi/creds.go +++ b/lfsapi/creds.go @@ -8,28 +8,34 @@ import ( "strings" "sync" - "github.com/git-lfs/git-lfs/config" "github.com/git-lfs/git-lfs/errors" "github.com/rubyist/tracerx" ) -// credsConfig supplies configuration options pertaining to the authorization -// process in package lfsapi. -type credsConfig struct { - // AskPass is a string containing an executable name as well as a - // program arguments. - // - // See: https://git-scm.com/docs/gitcredentials#_requesting_credentials - // for more. - AskPass string `os:"GIT_ASKPASS" git:"core.askpass" os:"SSH_ASKPASS"` - // Helper is a string defining the credential helper that Git should use. - Helper string `git:"credential.helper"` - // Cached is a boolean determining whether or not to enable the - // credential cacher. - Cached bool - // SkipPrompt is a boolean determining whether or not to prompt the user - // for a password. - SkipPrompt bool `os:"GIT_TERMINAL_PROMPT"` +// CredentialHelper is an interface used by the lfsapi Client to interact with +// the 'git credential' command: https://git-scm.com/docs/gitcredentials +// Other implementations include ASKPASS support, and an in-memory cache. +type CredentialHelper interface { + Fill(Creds) (Creds, error) + Reject(Creds) error + Approve(Creds) error +} + +// Creds represents a set of key/value pairs that are passed to 'git credential' +// as input. +type Creds map[string]string + +func bufferCreds(c Creds) *bytes.Buffer { + buf := new(bytes.Buffer) + + for k, v := range c { + buf.Write([]byte(k)) + buf.Write([]byte("=")) + buf.Write([]byte(v)) + buf.Write([]byte("\n")) + } + + return buf } // getCredentialHelper parses a 'credsConfig' from the git and OS environments, @@ -37,110 +43,32 @@ type credsConfig struct { // // It returns an error if any configuration was invalid, or otherwise // un-useable. 
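`bufferCreds` above serializes a `Creds` map into the `key=value` lines that the `git credential` subcommands read on standard input. A standalone sketch of that wire format (map iteration order is not stable in Go, which is fine for this line-oriented protocol):

```go
package main

import (
	"bytes"
	"fmt"
)

func bufferCreds(c map[string]string) *bytes.Buffer {
	buf := new(bytes.Buffer)
	for k, v := range c {
		// One "key=value" pair per line, e.g. "protocol=https".
		fmt.Fprintf(buf, "%s=%s\n", k, v)
	}
	return buf
}

func main() {
	creds := map[string]string{
		"protocol": "https",
		"host":     "git-server.com", // hypothetical host
		"username": "user",
		"password": "monkey",
	}
	// A buffer like this is what gets piped to `git credential fill|approve|reject`.
	fmt.Print(bufferCreds(creds).String())
}
```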
-func getCredentialHelper(osEnv, gitEnv config.Environment) (CredentialHelper, error) { - ccfg, err := getCredentialConfig(osEnv, gitEnv) - if err != nil { - return nil, err +func (c *Client) getCredentialHelper(u *url.URL) (CredentialHelper, Creds) { + rawurl := fmt.Sprintf("%s://%s%s", u.Scheme, u.Host, u.Path) + input := Creds{"protocol": u.Scheme, "host": u.Host} + if u.User != nil && u.User.Username() != "" { + input["username"] = u.User.Username() + } + if c.uc.Bool("credential", rawurl, "usehttppath", false) { + input["path"] = strings.TrimPrefix(u.Path, "/") } - var hs []CredentialHelper - if len(ccfg.Helper) == 0 && len(ccfg.AskPass) > 0 { - hs = append(hs, &AskPassCredentialHelper{ - Program: ccfg.AskPass, - }) + if c.Credentials != nil { + return c.Credentials, input } - var h CredentialHelper - h = &commandCredentialHelper{ - SkipPrompt: ccfg.SkipPrompt, + helpers := make([]CredentialHelper, 0, 3) + if c.cachingCredHelper != nil { + helpers = append(helpers, c.cachingCredHelper) } - - if ccfg.Cached { - h = withCredentialCache(h) - } - hs = append(hs, h) - - switch len(hs) { - case 0: - return nil, nil - case 1: - return hs[0], nil - } - return CredentialHelpers(hs), nil -} - -// getCredentialConfig parses a *credsConfig given the OS and Git -// configurations. -func getCredentialConfig(o, g config.Environment) (*credsConfig, error) { - askpass, ok := o.Get("GIT_ASKPASS") - if !ok { - askpass, ok = g.Get("core.askpass") - } - if !ok { - askpass, ok = o.Get("SSH_ASKPASS") - } - helper, _ := g.Get("credential.helper") - what := &credsConfig{ - AskPass: askpass, - Helper: helper, - Cached: g.Bool("lfs.cachecredentials", true), - SkipPrompt: o.Bool("GIT_TERMINAL_PROMPT", false), - } - - return what, nil -} - -// CredentialHelpers is a []CredentialHelper that iterates through each -// credential helper to fill, reject, or approve credentials. -type CredentialHelpers []CredentialHelper - -// Fill implements CredentialHelper.Fill by asking each CredentialHelper in -// order to fill the credentials. -// -// If a fill was successful, it is returned immediately, and no other -// `CredentialHelper`s are consulted. If any CredentialHelper returns an error, -// it is returned immediately. -func (h CredentialHelpers) Fill(what Creds) (Creds, error) { - for _, c := range h { - creds, err := c.Fill(what) - if err != nil { - return nil, err - } - - if creds != nil { - return creds, nil + if c.askpassCredHelper != nil { + helper, _ := c.uc.Get("credential", rawurl, "helper") + if len(helper) == 0 { + helpers = append(helpers, c.askpassCredHelper) } } - return nil, nil -} - -// Reject implements CredentialHelper.Reject and rejects the given Creds "what" -// amongst all knonw CredentialHelpers. If any `CredentialHelper`s returned a -// non-nil error, no further `CredentialHelper`s are notified, so as to prevent -// inconsistent state. -func (h CredentialHelpers) Reject(what Creds) error { - for _, c := range h { - if err := c.Reject(what); err != nil { - return err - } - } - - return nil -} - -// Approve implements CredentialHelper.Approve and approves the given Creds -// "what" amongst all known CredentialHelpers. If any `CredentialHelper`s -// returned a non-nil error, no further `CredentialHelper`s are notified, so as -// to prevent inconsistent state. 
-func (h CredentialHelpers) Approve(what Creds) error { - for _, c := range h { - if err := c.Approve(what); err != nil { - return err - } - } - - return nil + return NewCredentialHelpers(append(helpers, c.commandCredHelper)), input } // AskPassCredentialHelper implements the CredentialHelper type for GIT_ASKPASS @@ -234,88 +162,6 @@ func (a *AskPassCredentialHelper) args(prompt string) []string { return []string{prompt} } -type CredentialHelper interface { - Fill(Creds) (Creds, error) - Reject(Creds) error - Approve(Creds) error -} - -type Creds map[string]string - -func bufferCreds(c Creds) *bytes.Buffer { - buf := new(bytes.Buffer) - - for k, v := range c { - buf.Write([]byte(k)) - buf.Write([]byte("=")) - buf.Write([]byte(v)) - buf.Write([]byte("\n")) - } - - return buf -} - -func withCredentialCache(helper CredentialHelper) CredentialHelper { - return &credentialCacher{ - cmu: new(sync.Mutex), - creds: make(map[string]Creds), - helper: helper, - } -} - -type credentialCacher struct { - // cmu guards creds - cmu *sync.Mutex - creds map[string]Creds - helper CredentialHelper -} - -func credCacheKey(creds Creds) string { - parts := []string{ - creds["protocol"], - creds["host"], - creds["path"], - } - return strings.Join(parts, "//") -} - -func (c *credentialCacher) Fill(creds Creds) (Creds, error) { - key := credCacheKey(creds) - - c.cmu.Lock() - defer c.cmu.Unlock() - - if cache, ok := c.creds[key]; ok { - tracerx.Printf("creds: git credential cache (%q, %q, %q)", - creds["protocol"], creds["host"], creds["path"]) - return cache, nil - } - - creds, err := c.helper.Fill(creds) - if err == nil && len(creds["username"]) > 0 && len(creds["password"]) > 0 { - c.creds[key] = creds - } - return creds, err -} - -func (c *credentialCacher) Reject(creds Creds) error { - c.cmu.Lock() - defer c.cmu.Unlock() - - delete(c.creds, credCacheKey(creds)) - return c.helper.Reject(creds) -} - -func (c *credentialCacher) Approve(creds Creds) error { - err := c.helper.Approve(creds) - if err == nil { - c.cmu.Lock() - c.creds[credCacheKey(creds)] = creds - c.cmu.Unlock() - } - return err -} - type commandCredentialHelper struct { SkipPrompt bool } @@ -332,6 +178,8 @@ func (h *commandCredentialHelper) Reject(creds Creds) error { } func (h *commandCredentialHelper) Approve(creds Creds) error { + tracerx.Printf("creds: git credential approve (%q, %q, %q)", + creds["protocol"], creds["host"], creds["path"]) _, err := h.exec("approve", creds) return err } @@ -383,3 +231,198 @@ func (h *commandCredentialHelper) exec(subcommand string, input Creds) (Creds, e return creds, nil } + +type credentialCacher struct { + creds map[string]Creds + mu sync.Mutex +} + +func newCredentialCacher() *credentialCacher { + return &credentialCacher{creds: make(map[string]Creds)} +} + +func credCacheKey(creds Creds) string { + parts := []string{ + creds["protocol"], + creds["host"], + creds["path"], + } + return strings.Join(parts, "//") +} + +func (c *credentialCacher) Fill(what Creds) (Creds, error) { + key := credCacheKey(what) + c.mu.Lock() + cached, ok := c.creds[key] + c.mu.Unlock() + + if ok { + tracerx.Printf("creds: git credential cache (%q, %q, %q)", + what["protocol"], what["host"], what["path"]) + return cached, nil + } + + return nil, credHelperNoOp +} + +func (c *credentialCacher) Approve(what Creds) error { + key := credCacheKey(what) + + c.mu.Lock() + defer c.mu.Unlock() + + if _, ok := c.creds[key]; ok { + return nil + } + + c.creds[key] = what + return credHelperNoOp +} + +func (c *credentialCacher) Reject(what Creds) 
error { + key := credCacheKey(what) + c.mu.Lock() + delete(c.creds, key) + c.mu.Unlock() + return credHelperNoOp +} + +// CredentialHelpers iterates through a slice of CredentialHelper objects +// CredentialHelpers is a []CredentialHelper that iterates through each +// credential helper to fill, reject, or approve credentials. Typically, the +// first success returns immediately. Errors are reported to tracerx, unless +// all credential helpers return errors. Any erroring credential helpers are +// skipped for future calls. +// +// A CredentialHelper can return a credHelperNoOp error, signaling that the +// CredentialHelpers should try the next one. +type CredentialHelpers struct { + helpers []CredentialHelper + skippedHelpers map[int]bool + mu sync.Mutex +} + +// NewCredentialHelpers initializes a new CredentialHelpers from the given +// slice of CredentialHelper instances. +func NewCredentialHelpers(helpers []CredentialHelper) CredentialHelper { + return &CredentialHelpers{ + helpers: helpers, + skippedHelpers: make(map[int]bool), + } +} + +var credHelperNoOp = errors.New("no-op!") + +// Fill implements CredentialHelper.Fill by asking each CredentialHelper in +// order to fill the credentials. +// +// If a fill was successful, it is returned immediately, and no other +// `CredentialHelper`s are consulted. If any CredentialHelper returns an error, +// it is reported to tracerx, and the next one is attempted. If they all error, +// then a collection of all the error messages is returned. Erroring credential +// helpers are added to the skip list, and never attempted again for the +// lifetime of the current Git LFS command. +func (s *CredentialHelpers) Fill(what Creds) (Creds, error) { + errs := make([]string, 0, len(s.helpers)) + for i, h := range s.helpers { + if s.skipped(i) { + continue + } + + creds, err := h.Fill(what) + if err != nil { + if err != credHelperNoOp { + s.skip(i) + tracerx.Printf("credential fill error: %s", err) + errs = append(errs, err.Error()) + } + continue + } + + if creds != nil { + return creds, nil + } + } + + if len(errs) > 0 { + return nil, errors.New("credential fill errors:\n" + strings.Join(errs, "\n")) + } + + return nil, nil +} + +// Reject implements CredentialHelper.Reject and rejects the given Creds "what" +// with the first successful attempt. +func (s *CredentialHelpers) Reject(what Creds) error { + for i, h := range s.helpers { + if s.skipped(i) { + continue + } + + if err := h.Reject(what); err != credHelperNoOp { + return err + } + } + + return errors.New("no valid credential helpers to reject") +} + +// Approve implements CredentialHelper.Approve and approves the given Creds +// "what" with the first successful CredentialHelper. If an error occurrs, +// it calls Reject() with the same Creds and returns the error immediately. This +// ensures a caching credential helper removes the cache, since the Erroring +// CredentialHelper never successfully saved it. 
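The `credHelperNoOp` sentinel above lets a helper (such as the in-memory cache) decline without being treated as a failure, so the chain simply moves on to the next helper. A toy, package-independent sketch of that fallthrough idea, with `errNoOp` standing in for `credHelperNoOp` and trivial fillers standing in for real credential helpers:

```go
package main

import (
	"errors"
	"fmt"
)

var errNoOp = errors.New("no-op") // stand-in for credHelperNoOp

type filler interface {
	Fill(key string) (string, error)
}

type cacheFiller struct{ data map[string]string }

func (c cacheFiller) Fill(key string) (string, error) {
	if v, ok := c.data[key]; ok {
		return v, nil
	}
	return "", errNoOp // decline quietly; not an error
}

type promptFiller struct{}

func (promptFiller) Fill(key string) (string, error) {
	return "secret-for-" + key, nil // pretend we asked the user
}

// fillChain tries each filler in order; errNoOp means "try the next one".
func fillChain(key string, fillers ...filler) (string, error) {
	for _, f := range fillers {
		v, err := f.Fill(key)
		if err == errNoOp {
			continue
		}
		return v, err
	}
	return "", errors.New("no filler produced a value")
}

func main() {
	cache := cacheFiller{data: map[string]string{}} // empty cache declines
	v, err := fillChain("git-server.com", cache, promptFiller{})
	fmt.Println(v, err)
}
```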
+func (s *CredentialHelpers) Approve(what Creds) error { + skipped := make(map[int]bool) + for i, h := range s.helpers { + if s.skipped(i) { + skipped[i] = true + continue + } + + if err := h.Approve(what); err != credHelperNoOp { + if err != nil && i > 0 { // clear any cached approvals + for j := 0; j < i; j++ { + if !skipped[j] { + s.helpers[j].Reject(what) + } + } + } + return err + } + } + + return errors.New("no valid credential helpers to approve") +} + +func (s *CredentialHelpers) skip(i int) { + s.mu.Lock() + s.skippedHelpers[i] = true + s.mu.Unlock() +} + +func (s *CredentialHelpers) skipped(i int) bool { + s.mu.Lock() + skipped := s.skippedHelpers[i] + s.mu.Unlock() + return skipped +} + +type nullCredentialHelper struct{} + +var ( + nullCredError = errors.New("No credential helper configured") + nullCreds = &nullCredentialHelper{} +) + +func (h *nullCredentialHelper) Fill(input Creds) (Creds, error) { + return nil, nullCredError +} + +func (h *nullCredentialHelper) Approve(creds Creds) error { + return nil +} + +func (h *nullCredentialHelper) Reject(creds Creds) error { + return nil +} diff --git a/lfsapi/creds_test.go b/lfsapi/creds_test.go index b0bc01ce..4e78e066 100644 --- a/lfsapi/creds_test.go +++ b/lfsapi/creds_test.go @@ -5,264 +5,262 @@ import ( "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) -// test that cache satisfies Fill() without looking at creds -func TestCredsCacheFillFromCache(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(creds).(*credentialCacher) - cache.creds["http//lfs.test//foo/bar"] = Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", +type testCredHelper struct { + fillErr error + approveErr error + rejectErr error + fill []Creds + approve []Creds + reject []Creds +} + +func newTestCredHelper() *testCredHelper { + return &testCredHelper{ + fill: make([]Creds, 0), + approve: make([]Creds, 0), + reject: make([]Creds, 0), + } +} + +func (h *testCredHelper) Fill(input Creds) (Creds, error) { + h.fill = append(h.fill, input) + return input, h.fillErr +} + +func (h *testCredHelper) Approve(creds Creds) error { + h.approve = append(h.approve, creds) + return h.approveErr +} + +func (h *testCredHelper) Reject(creds Creds) error { + h.reject = append(h.reject, creds) + return h.rejectErr +} + +func TestCredHelperSetNoErrors(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} + + out, err := helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + // calling Fill() with empty cache + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 2, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + credsWithPass := Creds{"protocol": "https", "host": "example.com", "username": "foo", "password": "bar"} + assert.Nil(t, helpers.Approve(credsWithPass)) + assert.Equal(t, 1, len(helper1.approve)) + assert.Equal(t, 0, len(helper2.approve)) + + // calling Approve() again is cached + assert.Nil(t, helpers.Approve(credsWithPass)) + assert.Equal(t, 1, len(helper1.approve)) + assert.Equal(t, 0, len(helper2.approve)) + + // access cache + for i := 0; i < 3; i++ { + out, err = helpers.Fill(creds) + 
assert.Nil(t, err) + assert.Equal(t, credsWithPass, out) + assert.Equal(t, 2, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) } - filled, err := cache.Fill(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) - assert.Nil(t, err) - require.NotNil(t, filled) - assert.Equal(t, "u", filled["username"]) - assert.Equal(t, "p", filled["password"]) + assert.Nil(t, helpers.Reject(creds)) + assert.Equal(t, 1, len(helper1.reject)) + assert.Equal(t, 0, len(helper2.reject)) - assert.Equal(t, 1, len(cache.creds)) - cached, ok := cache.creds["http//lfs.test//foo/bar"] - assert.True(t, ok) - assert.Equal(t, "u", cached["username"]) - assert.Equal(t, "p", cached["password"]) + // Reject() is never cached + assert.Nil(t, helpers.Reject(creds)) + assert.Equal(t, 2, len(helper1.reject)) + assert.Equal(t, 0, len(helper2.reject)) + + // calling Fill() with empty cache + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 3, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) } -// test that cache caches Fill() value from creds -func TestCredsCacheFillFromValidHelperFill(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(creds).(*credentialCacher) +func TestCredHelperSetFillError(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} - creds.list = append(creds.list, Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", - }) - - assert.Equal(t, 0, len(cache.creds)) - - filled, err := cache.Fill(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) + helper1.fillErr = errors.New("boom") + out, err := helpers.Fill(creds) assert.Nil(t, err) - require.NotNil(t, filled) - assert.Equal(t, "u", filled["username"]) - assert.Equal(t, "p", filled["password"]) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 1, len(helper2.fill)) - assert.Equal(t, 1, len(cache.creds)) - cached, ok := cache.creds["http//lfs.test//foo/bar"] - assert.True(t, ok) - assert.Equal(t, "u", cached["username"]) - assert.Equal(t, "p", cached["password"]) + assert.Nil(t, helpers.Approve(creds)) + assert.Equal(t, 0, len(helper1.approve)) + assert.Equal(t, 1, len(helper2.approve)) - creds.list = make([]Creds, 0) - filled2, err := cache.Fill(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) - assert.Nil(t, err) - require.NotNil(t, filled2) - assert.Equal(t, "u", filled2["username"]) - assert.Equal(t, "p", filled2["password"]) -} - -// test that cache ignores Fill() value from creds with missing username+password -func TestCredsCacheFillFromInvalidHelperFill(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(creds).(*credentialCacher) - - creds.list = append(creds.list, Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "no-password", - }) - - assert.Equal(t, 0, len(cache.creds)) - - filled, err := cache.Fill(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", - }) - assert.Nil(t, err) - require.NotNil(t, filled) - assert.Equal(t, "no-password", filled["username"]) - assert.Equal(t, "", filled["password"]) - - assert.Equal(t, 0, len(cache.creds)) -} - -// test that cache ignores Fill() value 
from creds with error -func TestCredsCacheFillFromErroringHelperFill(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(&erroringCreds{creds}).(*credentialCacher) - - creds.list = append(creds.list, Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", - }) - - assert.Equal(t, 0, len(cache.creds)) - - filled, err := cache.Fill(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) - assert.NotNil(t, err) - require.NotNil(t, filled) - assert.Equal(t, "u", filled["username"]) - assert.Equal(t, "p", filled["password"]) - - assert.Equal(t, 0, len(cache.creds)) -} - -func TestCredsCacheRejectWithoutError(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(creds).(*credentialCacher) - - cache.creds["http//lfs.test//foo/bar"] = Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", + // Fill() with cache + for i := 0; i < 3; i++ { + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 1, len(helper2.fill)) } - err := cache.Reject(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) + assert.Nil(t, helpers.Reject(creds)) + assert.Equal(t, 0, len(helper1.reject)) + assert.Equal(t, 1, len(helper2.reject)) + + // Fill() with empty cache + out, err = helpers.Fill(creds) assert.Nil(t, err) - assert.Equal(t, 0, len(cache.creds)) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) // still skipped + assert.Equal(t, 2, len(helper2.fill)) } -func TestCredsCacheRejectWithError(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(&erroringCreds{creds}).(*credentialCacher) +func TestCredHelperSetApproveError(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} - cache.creds["http//lfs.test//foo/bar"] = Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", - } - - err := cache.Reject(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - }) - assert.NotNil(t, err) - assert.Equal(t, 0, len(cache.creds)) -} - -func TestCredsCacheApproveWithoutError(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(creds).(*credentialCacher) - - assert.Equal(t, 0, len(cache.creds)) - - err := cache.Approve(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "U", - "password": "P", - }) + approveErr := errors.New("boom") + helper1.approveErr = approveErr + out, err := helpers.Fill(creds) assert.Nil(t, err) - assert.Equal(t, 1, len(cache.creds)) - cached, ok := cache.creds["http//lfs.test//foo/bar"] - assert.True(t, ok) - assert.Equal(t, "U", cached["username"]) - assert.Equal(t, "P", cached["password"]) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + assert.Equal(t, approveErr, helpers.Approve(creds)) + assert.Equal(t, 1, len(helper1.approve)) + assert.Equal(t, 0, len(helper2.approve)) + + // cache is never set + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 2, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + assert.Nil(t, helpers.Reject(creds)) + 
assert.Equal(t, 1, len(helper1.reject)) + assert.Equal(t, 0, len(helper2.reject)) } -func TestCredsCacheApproveWithError(t *testing.T) { - creds := newFakeCreds() - cache := withCredentialCache(&erroringCreds{creds}).(*credentialCacher) +func TestCredHelperSetFillAndApproveError(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} - assert.Equal(t, 0, len(cache.creds)) + credErr := errors.New("boom") + helper1.fillErr = credErr + helper2.approveErr = credErr - err := cache.Approve(Creds{ - "protocol": "http", - "host": "lfs.test", - "path": "foo/bar", - "username": "u", - "password": "p", - }) - assert.NotNil(t, err) - assert.Equal(t, 0, len(cache.creds)) + out, err := helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 1, len(helper2.fill)) + + assert.Equal(t, credErr, helpers.Approve(creds)) + assert.Equal(t, 0, len(helper1.approve)) // skipped + assert.Equal(t, 0, len(helper1.reject)) // skipped + assert.Equal(t, 1, len(helper2.approve)) + + // never approved, so cache is empty + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) // still skipped + assert.Equal(t, 2, len(helper2.fill)) } -func newFakeCreds() *fakeCreds { - return &fakeCreds{list: make([]Creds, 0)} +func TestCredHelperSetRejectError(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} + + rejectErr := errors.New("boom") + helper1.rejectErr = rejectErr + out, err := helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + assert.Nil(t, helpers.Approve(creds)) + assert.Equal(t, 1, len(helper1.approve)) + assert.Equal(t, 0, len(helper2.approve)) + + // Fill() with cache + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) + + assert.Equal(t, rejectErr, helpers.Reject(creds)) + assert.Equal(t, 1, len(helper1.reject)) + assert.Equal(t, 0, len(helper2.reject)) + + // failed Reject() still clears cache + out, err = helpers.Fill(creds) + assert.Nil(t, err) + assert.Equal(t, creds, out) + assert.Equal(t, 2, len(helper1.fill)) + assert.Equal(t, 0, len(helper2.fill)) } -type erroringCreds struct { - helper CredentialHelper -} +func TestCredHelperSetAllFillErrors(t *testing.T) { + cache := newCredentialCacher() + helper1 := newTestCredHelper() + helper2 := newTestCredHelper() + helpers := NewCredentialHelpers([]CredentialHelper{cache, helper1, helper2}) + creds := Creds{"protocol": "https", "host": "example.com"} -func (e *erroringCreds) Fill(creds Creds) (Creds, error) { - c, _ := e.helper.Fill(creds) - return c, errors.New("fill error") -} - -func (e *erroringCreds) Reject(creds Creds) error { - e.helper.Reject(creds) - return errors.New("reject error") -} - -func (e *erroringCreds) Approve(creds Creds) error { - e.helper.Approve(creds) - return errors.New("approve error") -} - -type fakeCreds struct { - list []Creds -} - -func credsMatch(c1, c2 Creds) bool { - return c1["protocol"] 
== c2["protocol"] && - c1["host"] == c2["host"] && - c1["path"] == c2["path"] -} - -func (f *fakeCreds) Fill(creds Creds) (Creds, error) { - for _, saved := range f.list { - if credsMatch(creds, saved) { - return saved, nil - } + helper1.fillErr = errors.New("boom 1") + helper2.fillErr = errors.New("boom 2") + out, err := helpers.Fill(creds) + if assert.NotNil(t, err) { + assert.Equal(t, "credential fill errors:\nboom 1\nboom 2", err.Error()) } - return creds, nil -} + assert.Nil(t, out) + assert.Equal(t, 1, len(helper1.fill)) + assert.Equal(t, 1, len(helper2.fill)) -func (f *fakeCreds) Reject(creds Creds) error { - return nil -} + err = helpers.Approve(creds) + if assert.NotNil(t, err) { + assert.Equal(t, "no valid credential helpers to approve", err.Error()) + } + assert.Equal(t, 0, len(helper1.approve)) + assert.Equal(t, 0, len(helper2.approve)) -func (f *fakeCreds) Approve(creds Creds) error { - return nil + err = helpers.Reject(creds) + if assert.NotNil(t, err) { + assert.Equal(t, "no valid credential helpers to reject", err.Error()) + } + assert.Equal(t, 0, len(helper1.reject)) + assert.Equal(t, 0, len(helper2.reject)) } diff --git a/lfsapi/lfsapi.go b/lfsapi/lfsapi.go index 898391dd..db20dab8 100644 --- a/lfsapi/lfsapi.go +++ b/lfsapi/lfsapi.go @@ -6,8 +6,6 @@ import ( "io" "net/http" "regexp" - "strconv" - "strings" "sync" "github.com/ThomsonReutersEikon/go-ntlm/ntlm" @@ -47,10 +45,12 @@ type Client struct { LoggingStats bool // DEPRECATED - // only used for per-host ssl certs - gitEnv config.Environment - osEnv config.Environment - uc *config.URLConfig + commandCredHelper *commandCredentialHelper + askpassCredHelper *AskPassCredentialHelper + cachingCredHelper *credentialCacher + gitEnv config.Environment + osEnv config.Environment + uc *config.URLConfig } type Context interface { @@ -71,19 +71,14 @@ func NewClient(ctx Context) (*Client, error) { return nil, errors.Wrap(err, fmt.Sprintf("bad netrc file %s", netrcfile)) } - creds, err := getCredentialHelper(osEnv, gitEnv) - if err != nil { - return nil, errors.Wrap(err, "cannot find credential helper(s)") - } - + cacheCreds := gitEnv.Bool("lfs.cachecredentials", true) var sshResolver SSHResolver = &sshAuthClient{os: osEnv} - if gitEnv.Bool("lfs.cachecredentials", true) { + if cacheCreds { sshResolver = withSSHCache(sshResolver) } c := &Client{ Endpoints: NewEndpointFinder(ctx), - Credentials: creds, SSH: sshResolver, Netrc: netrc, DialTimeout: gitEnv.Int("lfs.dialtimeout", 0), @@ -93,9 +88,29 @@ func NewClient(ctx Context) (*Client, error) { SkipSSLVerify: !gitEnv.Bool("http.sslverify", true) || osEnv.Bool("GIT_SSL_NO_VERIFY", false), Verbose: osEnv.Bool("GIT_CURL_VERBOSE", false), DebuggingVerbose: osEnv.Bool("LFS_DEBUG_HTTP", false), - gitEnv: gitEnv, - osEnv: osEnv, - uc: config.NewURLConfig(gitEnv), + commandCredHelper: &commandCredentialHelper{ + SkipPrompt: osEnv.Bool("GIT_TERMINAL_PROMPT", false), + }, + gitEnv: gitEnv, + osEnv: osEnv, + uc: config.NewURLConfig(gitEnv), + } + + askpass, ok := osEnv.Get("GIT_ASKPASS") + if !ok { + askpass, ok = gitEnv.Get("core.askpass") + } + if !ok { + askpass, _ = osEnv.Get("SSH_ASKPASS") + } + if len(askpass) > 0 { + c.askpassCredHelper = &AskPassCredentialHelper{ + Program: askpass, + } + } + + if cacheCreds { + c.cachingCredHelper = newCredentialCacher() } return c, nil @@ -191,34 +206,14 @@ func (e testEnv) GetAll(key string) []string { return make([]string, 0) } -func (e testEnv) Int(key string, def int) (val int) { +func (e testEnv) Int(key string, def int) int { s, _ := e.Get(key) - 
if len(s) == 0 { - return def - } - - i, err := strconv.Atoi(s) - if err != nil { - return def - } - - return i + return config.Int(s, def) } -func (e testEnv) Bool(key string, def bool) (val bool) { +func (e testEnv) Bool(key string, def bool) bool { s, _ := e.Get(key) - if len(s) == 0 { - return def - } - - switch strings.ToLower(s) { - case "true", "1", "on", "yes", "t": - return true - case "false", "0", "off", "no", "f": - return false - default: - return false - } + return config.Bool(s, def) } func (e testEnv) All() map[string][]string { diff --git a/lfsapi/ntlm.go b/lfsapi/ntlm.go index b180b08a..b3b457b8 100644 --- a/lfsapi/ntlm.go +++ b/lfsapi/ntlm.go @@ -14,7 +14,7 @@ import ( ) func (c *Client) doWithNTLM(req *http.Request, credHelper CredentialHelper, creds Creds, credsURL *url.URL) (*http.Response, error) { - res, err := c.Do(req) + res, err := c.do(req) if err != nil && !errors.IsAuthError(err) { return res, err } @@ -69,7 +69,7 @@ func (c *Client) ntlmReAuth(req *http.Request, credHelper CredentialHelper, cred func (c *Client) ntlmNegotiate(req *http.Request, message string) (*http.Response, []byte, error) { req.Header.Add("Authorization", message) - res, err := c.Do(req) + res, err := c.do(req) if err != nil && !errors.IsAuthError(err) { return res, nil, err } @@ -100,7 +100,7 @@ func (c *Client) ntlmChallenge(req *http.Request, challengeBytes []byte, creds C authMsg := base64.StdEncoding.EncodeToString(authenticate.Bytes()) req.Header.Set("Authorization", "NTLM "+authMsg) - return c.Do(req) + return c.do(req) } func (c *Client) ntlmClientSession(creds Creds) (ntlm.ClientSession, error) { diff --git a/lfsapi/ntlm_test.go b/lfsapi/ntlm_test.go index fa776173..ab9eba94 100644 --- a/lfsapi/ntlm_test.go +++ b/lfsapi/ntlm_test.go @@ -90,7 +90,6 @@ func TestNTLMAuth(t *testing.T) { creds := Creds{ "protocol": srvURL.Scheme, "host": srvURL.Host, - "path": "ntlm", "username": "ntlmdomain\\ntlmuser", "password": "ntlmpass", } diff --git a/locking/api.go b/locking/api.go index 12f6d1a0..1ba55120 100644 --- a/locking/api.go +++ b/locking/api.go @@ -12,6 +12,10 @@ type lockClient struct { *lfsapi.Client } +type lockRef struct { + Name string `json:"name,omitempty"` +} + // LockRequest encapsulates the payload sent across the API when a client would // like to obtain a lock against a particular path on a given remote. type lockRequest struct { @@ -192,6 +196,8 @@ func (c *lockClient) Search(remote string, searchReq *lockSearchRequest) (*lockL // lockVerifiableRequest encapsulates the request sent to the server when the // client would like a list of locks to verify a Git push. type lockVerifiableRequest struct { + Ref *lockRef `json:"ref"` + // Cursor is an optional field used to tell the server which lock was // seen last, if scanning through multiple pages of results. 
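With the `Ref` field added above, the lock verification request body now names the ref being pushed, which the test server's `branch-required` repositories check. Marshaled, the request looks roughly like the sketch below; the ref name is hypothetical, and the `Cursor`/`Limit` tags are assumptions based on the surrounding context:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type lockRef struct {
	Name string `json:"name,omitempty"`
}

type lockVerifiableRequest struct {
	Ref    *lockRef `json:"ref"`
	Cursor string   `json:"cursor,omitempty"` // assumed tag
	Limit  int      `json:"limit,omitempty"`  // assumed tag
}

func main() {
	body := lockVerifiableRequest{
		Ref:   &lockRef{Name: "refs/heads/master"},
		Limit: 100,
	}
	out, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints the JSON body: a "ref" object with the branch name plus "limit";
	// "cursor" is omitted because it is empty.
	fmt.Println(string(out))
}
```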
// diff --git a/locking/locks.go b/locking/locks.go index feb0a537..31b91425 100644 --- a/locking/locks.go +++ b/locking/locks.go @@ -206,13 +206,16 @@ func (c *Client) SearchLocks(filter map[string]string, limit int, localOnly bool } } -func (c *Client) VerifiableLocks(limit int) (ourLocks, theirLocks []Lock, err error) { +func (c *Client) VerifiableLocks(ref *git.Ref, limit int) (ourLocks, theirLocks []Lock, err error) { ourLocks = make([]Lock, 0, limit) theirLocks = make([]Lock, 0, limit) body := &lockVerifiableRequest{ + Ref: &lockRef{Name: ref.Refspec()}, Limit: limit, } + c.cache.Clear() + for { list, res, err := c.client.SearchVerifiable(c.Remote, body) if res != nil { @@ -236,6 +239,7 @@ func (c *Client) VerifiableLocks(limit int) (ourLocks, theirLocks []Lock, err er } for _, l := range list.Ours { + c.cache.Add(l) ourLocks = append(ourLocks, l) if limit > 0 && (len(ourLocks)+len(theirLocks)) >= limit { return ourLocks, theirLocks, nil @@ -243,6 +247,7 @@ func (c *Client) VerifiableLocks(limit int) (ourLocks, theirLocks []Lock, err er } for _, l := range list.Theirs { + c.cache.Add(l) theirLocks = append(theirLocks, l) if limit > 0 && (len(ourLocks)+len(theirLocks)) >= limit { return ourLocks, theirLocks, nil @@ -349,23 +354,6 @@ func (c *Client) lockIdFromPath(path string) (string, error) { } } -// Fetch locked files for the current user and cache them locally -// This can be used to sync up locked files when moving machines -func (c *Client) refreshLockCache() error { - ourLocks, _, err := c.VerifiableLocks(0) - if err != nil { - return err - } - - // We're going to overwrite the entire local cache - c.cache.Clear() - for _, l := range ourLocks { - c.cache.Add(l) - } - - return nil -} - // IsFileLockedByCurrentCommitter returns whether a file is locked by the // current user, as cached locally func (c *Client) IsFileLockedByCurrentCommitter(path string) bool { diff --git a/locking/locks_test.go b/locking/locks_test.go index 0e1b6603..6cf55ad6 100644 --- a/locking/locks_test.go +++ b/locking/locks_test.go @@ -64,8 +64,7 @@ func TestRefreshCache(t *testing.T) { assert.Nil(t, err) assert.Empty(t, locks) - // Should load from test data, just Fred's - err = client.refreshLockCache() + _, _, err = client.VerifiableLocks(nil, 100) assert.Nil(t, err) locks, err = client.SearchLocks(nil, 0, true) @@ -79,6 +78,8 @@ func TestRefreshCache(t *testing.T) { Lock{Path: "folder/test1.dat", Id: "101", Owner: &User{Name: "Fred"}, LockedAt: zeroTime}, Lock{Path: "folder/test2.dat", Id: "102", Owner: &User{Name: "Fred"}, LockedAt: zeroTime}, Lock{Path: "root.dat", Id: "103", Owner: &User{Name: "Fred"}, LockedAt: zeroTime}, + Lock{Path: "other/test1.dat", Id: "199", Owner: &User{Name: "Charles"}, LockedAt: zeroTime}, + Lock{Path: "folder/test3.dat", Id: "99", Owner: &User{Name: "Alice"}, LockedAt: zeroTime}, }, locks) } @@ -129,7 +130,7 @@ func TestGetVerifiableLocks(t *testing.T) { client, err := NewClient("", lfsclient) assert.Nil(t, err) - ourLocks, theirLocks, err := client.VerifiableLocks(0) + ourLocks, theirLocks, err := client.VerifiableLocks(nil, 0) assert.Nil(t, err) // Need to include zero time in structure for equal to work diff --git a/script/integration.go b/script/integration.go index 68b1beb8..e67b30a9 100644 --- a/script/integration.go +++ b/script/integration.go @@ -50,17 +50,24 @@ func mainIntegration() { for _, file := range files { tests <- file } + close(tests) + + outputDone := make(chan bool) + go func() { + for out := range output { + fmt.Println(out) + } + outputDone <- true + 
+	}()
 
-	go printOutput(output)
 	for i := 0; i < maxprocs; i++ {
 		wg.Add(1)
 		go worker(tests, output, &wg)
 	}
 
-	close(tests)
 	wg.Wait()
 	close(output)
-	printOutput(output)
+	<-outputDone
 
 	if erroring {
 		os.Exit(1)
@@ -116,19 +123,6 @@ func sendTestOutput(output chan string, testname string, buf *bytes.Buffer, err
 	}
 }
 
-func printOutput(output <-chan string) {
-	for {
-		select {
-		case out, ok := <-output:
-			if !ok {
-				return
-			}
-
-			fmt.Println(out)
-		}
-	}
-}
-
 func worker(tests <-chan string, output chan string, wg *sync.WaitGroup) {
 	defer wg.Done()
 	for {
diff --git a/test/cmd/lfstest-gitserver.go b/test/cmd/lfstest-gitserver.go
index 7bb58ec2..f3aee82e 100644
--- a/test/cmd/lfstest-gitserver.go
+++ b/test/cmd/lfstest-gitserver.go
@@ -881,11 +881,23 @@ type LockList struct {
 	Message string `json:"message,omitempty"`
 }
 
+type Ref struct {
+	Name string `json:"name,omitempty"`
+}
+
 type VerifiableLockRequest struct {
+	Ref    *Ref   `json:"ref,omitempty"`
 	Cursor string `json:"cursor,omitempty"`
 	Limit  int    `json:"limit,omitempty"`
 }
 
+func (r *VerifiableLockRequest) RefName() string {
+	if r.Ref == nil {
+		return ""
+	}
+	return r.Ref.Name
+}
+
 type VerifiableLockList struct {
 	Ours   []Lock `json:"ours"`
 	Theirs []Lock `json:"theirs"`
@@ -1089,6 +1101,18 @@ func locksHandler(w http.ResponseWriter, r *http.Request, repo string) {
 			return
 		}
 
+		if strings.HasSuffix(repo, "branch-required") {
+			parts := strings.Split(repo, "-")
+			lenParts := len(parts)
+			if lenParts > 3 && "refs/heads/"+parts[lenParts-3] != reqBody.RefName() {
+				w.WriteHeader(403)
+				enc.Encode(struct {
+					Message string `json:"message"`
+				}{fmt.Sprintf("Expected ref %q, got %q", "refs/heads/"+parts[lenParts-3], reqBody.RefName())})
+				return
+			}
+		}
+
 		ll := &VerifiableLockList{}
 		locks, nextCursor, err := getFilteredLocks(repo, "", reqBody.Cursor,
diff --git a/test/test-credentials-no-prompt.sh b/test/test-credentials-no-prompt.sh
index 3e85248e..a9fd287c 100755
--- a/test/test-credentials-no-prompt.sh
+++ b/test/test-credentials-no-prompt.sh
@@ -28,3 +28,25 @@ begin_test "attempt private access without credential helper"
   grep "Git credentials for $GITSERVER/$reponame not found" push.log
 )
 end_test
+
+begin_test "askpass: push with bad askpass"
+(
+  set -e
+
+  reponame="askpass-with-bad-askpass"
+  setup_remote_repo "$reponame"
+  clone_repo "$reponame" "$reponame"
+
+  git lfs track "*.dat"
+  echo "hello" > a.dat
+
+  git add .gitattributes a.dat
+  git commit -m "initial commit"
+
+  git config "credential.helper" ""
+  GIT_TERMINAL_PROMPT=0 GIT_ASKPASS="lfs-askpass-2" SSH_ASKPASS="dont-call-me" GIT_TRACE=1 git push origin master 2>&1 | tee push.log
+  grep "filling with GIT_ASKPASS" push.log # attempt askpass
+  grep 'credential fill error: exec: "lfs-askpass-2"' push.log # askpass fails
+  grep "creds: git credential fill" push.log # attempt git credential
+)
+end_test
diff --git a/test/test-credentials.sh b/test/test-credentials.sh
index dab525dd..fcef870f 100755
--- a/test/test-credentials.sh
+++ b/test/test-credentials.sh
@@ -4,11 +4,36 @@
 
 ensure_git_version_isnt $VERSION_LOWER "2.3.0"
 
+begin_test "credentials with url-specific helper skips askpass"
+(
+  set -e
+
+  reponame="url-specific-helper"
+  setup_remote_repo "$reponame"
+
+  clone_repo "$reponame" "$reponame"
+  git config credential.useHttpPath false
+  git config credential.helper ""
+  git config credential.$GITSERVER.helper "lfstest"
+
+  git lfs track "*.dat"
+  echo "hello" > a.dat
+
+  git add .gitattributes a.dat
+  git commit -m "initial commit"
+
+  # askpass is skipped
+  GIT_ASKPASS="lfs-bad-cmd" GIT_TRACE=1 git push origin master 2>&1 | tee push.log
+
+  [ "0" -eq "$(grep "filling with GIT_ASKPASS" push.log | wc -l)" ]
+)
+end_test
+
 begin_test "credentials without useHttpPath, with bad path password"
 (
   set -e
 
-  reponame="$(basename "$0" ".sh")"
+  reponame="no-httppath-bad-password"
   setup_remote_repo "$reponame"
 
   printf "path:wrong" > "$CREDSDIR/127.0.0.1--$reponame"
@@ -20,16 +45,55 @@ begin_test "credentials without useHttpPath, with bad path password"
   git lfs track "*.dat" 2>&1 | tee track.log
   grep "Tracking \"\*.dat\"" track.log
 
-  contents="a"
-  contents_oid=$(calc_oid "$contents")
-
-  printf "$contents" > a.dat
+  printf "a" > a.dat
   git add a.dat
   git add .gitattributes
   git commit -m "add a.dat"
 
-  git push origin without-path 2>&1 | tee push.log
+  GIT_TRACE=1 git push origin without-path 2>&1 | tee push.log
   grep "(1 of 1 files)" push.log
+
+  echo "approvals:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential approve" | wc -l)" ]
+  echo "fills:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential fill" | wc -l)" ]
+
+  echo "credential calls have no path:"
+  credcalls="$(grep "creds: git credential" push.log)"
+  [ "0" -eq "$(echo "$credcalls" | grep "no-httppath-bad-password" | wc -l)" ]
+  expected="$(echo "$credcalls" | wc -l)"
+  [ "$expected" -eq "$(printf "$credcalls" | grep '", "")' | wc -l)" ]
+)
+end_test
+
+begin_test "credentials with url-specific useHttpPath, with bad path password"
+(
+  set -e
+
+  reponame="url-specific-httppath-bad-password"
+  setup_remote_repo "$reponame"
+
+  printf "path:wrong" > "$CREDSDIR/127.0.0.1--$reponame"
+
+  clone_repo "$reponame" with-url-specific-path
+  git config credential.$GITSERVER.useHttpPath false
+  git checkout -b without-path
+
+  git lfs track "*.dat" 2>&1 | tee track.log
+  grep "Tracking \"\*.dat\"" track.log
+
+  printf "a" > a.dat
+  git add a.dat
+  git add .gitattributes
+  git commit -m "add a.dat"
+
+  GIT_TRACE=1 git push origin without-path 2>&1 | tee push.log
+  grep "(1 of 1 files)" push.log
+
+  echo "approvals:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential approve" | wc -l)" ]
+  echo "fills:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential fill" | wc -l)" ]
 )
 end_test
 
@@ -37,7 +101,7 @@ begin_test "credentials with useHttpPath, with wrong password"
 (
   set -e
 
-  reponame="$(basename "$0" ".sh")"
+  reponame="httppath-bad-password"
   setup_remote_repo "$reponame"
 
   printf "path:wrong" > "$CREDSDIR/127.0.0.1--$reponame"
@@ -56,8 +120,12 @@ begin_test "credentials with useHttpPath, with wrong password"
   git add .gitattributes
   git commit -m "add a.dat"
 
-  git push origin with-path-wrong-pass 2>&1 | tee push.log
+  GIT_TRACE=1 git push origin with-path-wrong-pass 2>&1 | tee push.log
   [ "0" = "$(grep -c "(1 of 1 files)" push.log)" ]
+  echo "approvals:"
+  [ "0" -eq "$(cat push.log | grep "creds: git credential approve" | wc -l)" ]
+  echo "fills:"
+  [ "2" -eq "$(cat push.log | grep "creds: git credential fill" | wc -l)" ]
 )
 end_test
 
@@ -86,8 +154,17 @@ begin_test "credentials with useHttpPath, with correct password"
   git add .gitattributes
   git commit -m "add b.dat"
 
-  git push origin with-path-correct-pass 2>&1 | tee push.log
+  GIT_TRACE=1 git push origin with-path-correct-pass 2>&1 | tee push.log
   grep "(1 of 1 files)" push.log
+  echo "approvals:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential approve" | wc -l)" ]
+  echo "fills:"
+  [ "1" -eq "$(cat push.log | grep "creds: git credential fill" | wc -l)" ]
+  echo "credential calls have path:"
+  credcalls="$(grep "creds: git credential" push.log)"
-eq "$(echo "$credcalls" | grep '", "")' | wc -l)" ] + expected="$(echo "$credcalls" | wc -l)" + [ "$expected" -eq "$(printf "$credcalls" | grep "test-credentials" | wc -l)" ] ) end_test @@ -175,8 +252,10 @@ begin_test "credentials from netrc" git add .gitattributes a.dat git commit -m "add a.dat" - git lfs push netrc master 2>&1 | tee push.log + GIT_TRACE=1 git lfs push netrc master 2>&1 | tee push.log grep "(1 of 1 files)" push.log + echo "any git credential calls:" + [ "0" -eq "$(cat push.log | grep "git credential" | wc -l)" ] ) end_test diff --git a/test/test-expired.sh b/test/test-expired.sh index beba72f7..5639442c 100755 --- a/test/test-expired.sh +++ b/test/test-expired.sh @@ -47,7 +47,6 @@ for typ in "${expiration_types[@]}"; do sshurl="${GITSERVER/http:\/\//ssh://git@}/$reponame" git config lfs.url "$sshurl" - git config lfs.cachecredentials "true" contents="contents" contents_oid="$(calc_oid "$contents")" diff --git a/test/test-extra-header.sh b/test/test-extra-header.sh index 1cac229b..823a7d62 100755 --- a/test/test-extra-header.sh +++ b/test/test-extra-header.sh @@ -25,3 +25,37 @@ begin_test "http..extraHeader" grep "> X-Foo: baz" curl.log ) end_test + +begin_test "http..extraHeader with authorization" +( + set -e + + reponame="requirecreds" + setup_remote_repo "$reponame" + clone_repo "$reponame" "$reponame" + + # See: test/cmd/lfstest-gitserver.go:1176. + user="requirecreds" + pass="pass" + auth="Basic $(echo -n $user:$pass | base64)" + + git config --add "http.extraHeader" "Authorization: $auth" + + git lfs track "*.dat" + printf "contents" > a.dat + git add .gitattributes a.dat + git commit -m "initial commit" + + git push origin master 2>&1 | tee curl.log + if [ "0" -ne "${PIPESTATUS[0]}" ]; then + echo >&2 "expected \`git push origin master\` to succeed, didn't" + exit 1 + fi + + [ "0" -eq "$(grep -c "creds: filling with GIT_ASKPASS" curl.log)" ] + [ "0" -eq "$(grep -c "creds: git credential approve" curl.log)" ] + [ "0" -eq "$(grep -c "creds: git credential cache" curl.log)" ] + [ "0" -eq "$(grep -c "creds: git credential fill" curl.log)" ] + [ "0" -eq "$(grep -c "creds: git credential reject" curl.log)" ] +) +end_test diff --git a/test/test-fetch.sh b/test/test-fetch.sh index f17f15b3..a8fa2a04 100755 --- a/test/test-fetch.sh +++ b/test/test-fetch.sh @@ -440,12 +440,6 @@ begin_test "fetch with no origin remote" # and no origin, but only 1 remote, should pick the only one as default git lfs fetch assert_local_object "$contents_oid" 1 - - # delete again, now add a second remote, also non-origin - rm -rf .git/lfs - git remote add something2 "$GITSERVER/$reponame" - git lfs fetch 2>&1 | grep "No default remote" - refute_local_object "$contents_oid" ) end_test diff --git a/test/test-happy-path.sh b/test/test-happy-path.sh index 1a74c77b..84adf85b 100755 --- a/test/test-happy-path.sh +++ b/test/test-happy-path.sh @@ -64,6 +64,35 @@ begin_test "happy path" ) end_test +begin_test "happy path on non-origin remote" +( + set -e + + reponame="happy-without-origin" + setup_remote_repo "$reponame" + + clone_repo "$reponame" repo-without-origin + git lfs track "*.dat" + git add .gitattributes + git commit -m "track" + git push origin master + + clone_repo "$reponame" clone-without-origin + git remote rename origin happy-path + + cd ../repo-without-origin + echo "a" > a.dat + git add a.dat + git commit -m "boom" + git push origin master + + cd ../clone-without-origin + echo "remotes:" + git remote + git pull happy-path master +) +end_test + begin_test "clears local temp objects" ( 
   set -e
diff --git a/test/test-migrate-import.sh b/test/test-migrate-import.sh
index ccf57fb1..afb83ec0 100755
--- a/test/test-migrate-import.sh
+++ b/test/test-migrate-import.sh
@@ -202,6 +202,47 @@ begin_test "migrate import (given branch, exclude remote refs)"
 )
 end_test
 
+begin_test "migrate import (given ref, --skip-fetch)"
+(
+  set -e
+
+  setup_single_remote_branch
+
+  md_master_oid="$(calc_oid "$(git cat-file -p "refs/heads/master:a.md")")"
+  md_remote_oid="$(calc_oid "$(git cat-file -p "refs/remotes/origin/master:a.md")")"
+  txt_master_oid="$(calc_oid "$(git cat-file -p "refs/heads/master:a.txt")")"
+  txt_remote_oid="$(calc_oid "$(git cat-file -p "refs/remotes/origin/master:a.txt")")"
+
+  git tag pseudo-remote "$(git rev-parse refs/remotes/origin/master)"
+  # Remove the refs/remotes/origin/master ref, and instruct 'git lfs migrate' to
+  # not fetch it.
+  git update-ref -d refs/remotes/origin/master
+
+  git lfs migrate import --skip-fetch
+
+  assert_pointer "refs/heads/master" "a.md" "$md_master_oid" "50"
+  assert_pointer "pseudo-remote" "a.md" "$md_remote_oid" "140"
+  assert_pointer "refs/heads/master" "a.txt" "$txt_master_oid" "30"
+  assert_pointer "pseudo-remote" "a.txt" "$txt_remote_oid" "120"
+
+  assert_local_object "$md_master_oid" "50"
+  assert_local_object "$txt_master_oid" "30"
+  assert_local_object "$md_remote_oid" "140"
+  assert_local_object "$txt_remote_oid" "120"
+
+  master="$(git rev-parse refs/heads/master)"
+  remote="$(git rev-parse pseudo-remote)"
+
+  master_attrs="$(git cat-file -p "$master:.gitattributes")"
+  remote_attrs="$(git cat-file -p "$remote:.gitattributes")"
+
+  echo "$master_attrs" | grep -q "*.md filter=lfs diff=lfs merge=lfs"
+  echo "$master_attrs" | grep -q "*.txt filter=lfs diff=lfs merge=lfs"
+  echo "$remote_attrs" | grep -q "*.md filter=lfs diff=lfs merge=lfs"
+  echo "$remote_attrs" | grep -q "*.txt filter=lfs diff=lfs merge=lfs"
+)
+end_test
+
 begin_test "migrate import (include/exclude ref)"
 (
   set -e
@@ -380,6 +421,20 @@ begin_test "migrate import (--everything)"
 )
 end_test
 
+begin_test "migrate import (ambiguous reference)"
+(
+  set -e
+
+  setup_multiple_local_branches
+
+  # Create an ambiguously named reference sharing the same name as the SHA-1
+  # of "HEAD".
+  sha="$(git rev-parse HEAD)"
+  git tag "$sha"
+
+  git lfs migrate import --everything
+)
+end_test
 
 begin_test "migrate import (--everything with args)"
 (
@@ -403,8 +458,6 @@ begin_test "migrate import (--everything with --include-ref)"
 )
 end_test
 
-exit 0
-
 begin_test "migrate import (--everything with --exclude-ref)"
 (
   set -e
diff --git a/test/test-migrate-info.sh b/test/test-migrate-info.sh
index 58fef7cf..ee641082 100755
--- a/test/test-migrate-info.sh
+++ b/test/test-migrate-info.sh
@@ -132,6 +132,33 @@ begin_test "migrate info (given branch, exclude remote refs)"
 )
 end_test
 
+begin_test "migrate info (given ref, --skip-fetch)"
+(
+  set -e
+
+  setup_single_remote_branch
+
+  original_remote="$(git rev-parse refs/remotes/origin/master)"
+  original_master="$(git rev-parse refs/heads/master)"
+
+  git tag pseudo-remote "$original_remote"
+  # Remove the refs/remotes/origin/master ref, and instruct 'git lfs migrate' to
+  # not fetch it.
+  git update-ref -d refs/remotes/origin/master
+
+  diff -u <(git lfs migrate info --skip-fetch 2>&1 | tail -n 2) <(cat <<-EOF
+	*.md 	190 B	2/2 files(s)	100%
+	*.txt	150 B	2/2 files(s)	100%
+	EOF)
+
+  migrated_remote="$(git rev-parse pseudo-remote)"
+  migrated_master="$(git rev-parse refs/heads/master)"
+
+  assert_ref_unmoved "refs/remotes/origin/master" "$original_remote" "$migrated_remote"
+  assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
+)
+end_test
+
 begin_test "migrate info (include/exclude ref)"
 (
   set -e
@@ -307,6 +334,21 @@ begin_test "migrate info (--everything)"
 )
 end_test
 
+begin_test "migrate info (ambiguous reference)"
+(
+  set -e
+
+  setup_multiple_local_branches
+
+  # Create an ambiguously named reference sharing the same name as the SHA-1
+  # of "HEAD".
+  sha="$(git rev-parse HEAD)"
+  git tag "$sha"
+
+  git lfs migrate info --everything
+)
+end_test
+
 begin_test "migrate info (--everything with args)"
 (
   set -e
diff --git a/test/test-pre-push.sh b/test/test-pre-push.sh
index 6816cca7..071c34f9 100755
--- a/test/test-pre-push.sh
+++ b/test/test-pre-push.sh
@@ -583,7 +583,7 @@ begin_test "pre-push with our lock"
   git commit -m "add unauthorized changes"
 
   GIT_CURL_VERBOSE=1 git push origin master 2>&1 | tee push.log
-  grep "Consider unlocking your own locked file(s)" push.log
+  grep "Consider unlocking your own locked files" push.log
   grep "* locked.dat" push.log
 
   assert_server_lock "$id"
@@ -631,7 +631,7 @@ begin_test "pre-push with their lock on lfs file"
     exit 1
   fi
 
-  grep "Unable to push 1 locked file(s)" push.log
+  grep "Unable to push locked files" push.log
   grep "* locked_theirs.dat - Git LFS Tests" push.log
 
   grep "ERROR: Cannot update locked files." push.log
@@ -687,7 +687,7 @@ begin_test "pre-push with their lock on non-lfs lockable file"
     exit 1
   fi
 
-  grep "Unable to push 2 locked file(s)" push.log
+  grep "Unable to push locked files" push.log
   grep "* large_locked_theirs.dat - Git LFS Tests" push.log
   grep "* tiny_locked_theirs.dat - Git LFS Tests" push.log
   grep "ERROR: Cannot update locked files." push.log
@@ -777,6 +777,96 @@ begin_test "pre-push disable locks verify on partial url"
 )
 end_test
 
+begin_test "pre-push locks verify 403 with good ref"
+(
+  set -e
+
+  reponame="lock-verify-master-branch-required"
+  setup_remote_repo "$reponame"
+  clone_repo "$reponame" "$reponame"
+
+  contents="example"
+  contents_oid="$(calc_oid "$contents")"
+  printf "$contents" > a.dat
+  git lfs track "*.dat"
+  git add .gitattributes a.dat
+  git commit --message "initial commit"
+
+  git config "lfs.$GITSERVER/$reponame.git.locksverify" true
+  git push origin master 2>&1 | tee push.log
+
+  assert_server_object "$reponame" "$contents_oid"
+)
+end_test
+
+begin_test "pre-push locks verify 403 with good tracked ref"
+(
+  set -e
+
+  reponame="lock-verify-tracked-branch-required"
+  setup_remote_repo "$reponame"
+  clone_repo "$reponame" "$reponame"
+
+  contents="example"
+  contents_oid="$(calc_oid "$contents")"
+  printf "$contents" > a.dat
+  git lfs track "*.dat"
+  git add .gitattributes a.dat
+  git commit --message "initial commit"
+
+  git config push.default upstream
+  git config branch.master.merge refs/heads/tracked
+  git config "lfs.$GITSERVER/$reponame.git.locksverify" true
+  git push 2>&1 | tee push.log
+
+  assert_server_object "$reponame" "$contents_oid"
+)
+end_test
+
+begin_test "pre-push locks verify 403 with explicit ref"
+(
+  set -e
+
+  reponame="lock-verify-explicit-branch-required"
+  setup_remote_repo "$reponame"
+  clone_repo "$reponame" "$reponame"
+
+  contents="example"
+  contents_oid="$(calc_oid "$contents")"
+  printf "$contents" > a.dat
+  git lfs track "*.dat"
+  git add .gitattributes a.dat
+  git commit --message "initial commit"
+
+  git config "lfs.$GITSERVER/$reponame.git.locksverify" true
+  git push origin master:explicit 2>&1 | tee push.log
+
+  assert_server_object "$reponame" "$contents_oid"
+)
+end_test
+
+begin_test "pre-push locks verify 403 with bad ref"
+(
+  set -e
+
+  reponame="lock-verify-other-branch-required"
+  setup_remote_repo "$reponame"
+  clone_repo "$reponame" "$reponame"
+
+  contents="example"
+  contents_oid="$(calc_oid "$contents")"
+  printf "$contents" > a.dat
+  git lfs track "*.dat"
+  git add .gitattributes a.dat
+  git commit --message "initial commit"
+
+  git config "lfs.$GITSERVER/$reponame.git.locksverify" true
+  git push origin master 2>&1 | tee push.log
+  grep "failed to push some refs" push.log
+  refute_server_object "$reponame" "$contents_oid"
+)
+end_test
+
 begin_test "pre-push locks verify 5xx with verification unset"
 (
   set -e
diff --git a/test/test-push.sh b/test/test-push.sh
index bbf58f1b..015a6e09 100755
--- a/test/test-push.sh
+++ b/test/test-push.sh
@@ -485,15 +485,8 @@ begin_test "push ambiguous branch name"
 
   # lfs push master, should work
   git lfs push origin master
 
-  # push ambiguous, should fail
-  set +e
+  # push ambiguous, does not fail since lfs scans git with sha, not ref name
   git lfs push origin ambiguous
-  if [ $? -eq 0 ]
-  then
-    exit 1
-  fi
-  set -e
-
 )
 end_test
 
diff --git a/test/test-track.sh b/test/test-track.sh
index 03ac091c..65ec3423 100755
--- a/test/test-track.sh
+++ b/test/test-track.sh
@@ -513,3 +513,30 @@ begin_test "track (\$GIT_LFS_TRACK_NO_INSTALL_HOOKS)"
   [ ! -f .git/hooks/post-merge ]
 )
 end_test
+
+begin_test "track (with comments)"
+(
+  set -e
+
+  reponame="track-with-comments"
+  git init "$reponame"
+  cd "$reponame"
+
+  echo "*.jpg filter=lfs diff=lfs merge=lfs -text" >> .gitattributes
+  echo "# *.png filter=lfs diff=lfs merge=lfs -text" >> .gitattributes
+  echo "*.pdf filter=lfs diff=lfs merge=lfs -text" >> .gitattributes
+
+  git add .gitattributes
+  git commit -m "initial commit"
+
+  git lfs track 2>&1 | tee track.log
+  if [ "0" -ne "${PIPESTATUS[0]}" ]; then
+    echo >&2 "expected \`git lfs track\` command to exit cleanly, didn't"
+    exit 1
+  fi
+
+  [ "1" -eq "$(grep -c "\.jpg" track.log)" ]
+  [ "1" -eq "$(grep -c "\.pdf" track.log)" ]
+  [ "0" -eq "$(grep -c "\.png" track.log)" ]
+)
+end_test
diff --git a/test/testhelpers.sh b/test/testhelpers.sh
index c7fe7660..445f2fb3 100644
--- a/test/testhelpers.sh
+++ b/test/testhelpers.sh
@@ -266,15 +266,20 @@ size %s
 
 wait_for_file() {
   local filename="$1"
   n=0
-  while [ $n -lt 10 ]; do
+  wait_time=1
+  while [ $n -lt 17 ]; do
     if [ -s $filename ]; then
       return 0
     fi
 
-    sleep 0.5
+    sleep $wait_time
     n=`expr $n + 1`
+    if [ $wait_time -lt 4 ]; then
+      wait_time=`expr $wait_time \* 2`
+    fi
   done
 
+  echo "$filename did not appear after 60 seconds."
   return 1
 }
diff --git a/test/testlib.sh b/test/testlib.sh
index 93055bca..ea42fdfb 100644
--- a/test/testlib.sh
+++ b/test/testlib.sh
@@ -92,6 +92,10 @@ begin_test () {
   mkdir "$HOME"
   cp "$TESTHOME/.gitconfig" "$HOME/.gitconfig"
 
+  # do not let Git use a different configuration file
+  unset GIT_CONFIG
+  unset XDG_CONFIG_HOME
+
   # allow the subshell to exit non-zero without exiting this process
   set -x +e
 }
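
As context for the locking/locks.go change above: a minimal caller-side sketch, not taken from this patch, of the reworked VerifiableLocks API, which now takes an explicit ref and repopulates the client's local lock cache while it pages through results (which is why refreshLockCache could be removed). It assumes this repository's git and locking packages; the helper name reportVerifiableLocks is hypothetical.

package example

import (
	"fmt"

	"github.com/git-lfs/git-lfs/git"
	"github.com/git-lfs/git-lfs/locking"
)

// reportVerifiableLocks asks the server which locks are ours vs. theirs for
// the currently checked-out ref; limit=0 means "no limit".
func reportVerifiableLocks(c *locking.Client) error {
	ref, err := git.CurrentRef()
	if err != nil {
		return err
	}

	// The ref scopes verification server-side; the client also refreshes its
	// local lock cache as a side effect of this call.
	ours, theirs, err := c.VerifiableLocks(ref, 0)
	if err != nil {
		return err
	}

	fmt.Printf("ours=%d theirs=%d\n", len(ours), len(theirs))
	return nil
}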
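
The lfstest-gitserver.go hunks accept an optional "ref" object in the verifiable-locks request body; the "*-branch-required" repos return 403 when it is missing or names the wrong branch. A stdlib-only sketch (struct tags copied from the diff, everything else illustrative) of the JSON a ref-scoped request produces:

package main

import (
	"encoding/json"
	"fmt"
)

type ref struct {
	Name string `json:"name,omitempty"`
}

type verifiableLockRequest struct {
	Ref    *ref   `json:"ref,omitempty"`
	Cursor string `json:"cursor,omitempty"`
	Limit  int    `json:"limit,omitempty"`
}

func main() {
	body, err := json.Marshal(verifiableLockRequest{
		Ref:   &ref{Name: "refs/heads/master"},
		Limit: 100,
	})
	if err != nil {
		panic(err)
	}
	// Prints: {"ref":{"name":"refs/heads/master"},"limit":100}
	fmt.Println(string(body))
}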
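
The script/integration.go hunks replace printOutput with a single collector goroutine plus a done channel, so every buffered result is printed before the process checks erroring and exits. A stripped-down sketch of that shape (names such as jobs are placeholders, not from the patch):

package main

import (
	"fmt"
	"sync"
)

func main() {
	jobs := make(chan int)
	output := make(chan string)

	// Producer: feed work, then close the channel so workers can drain it.
	go func() {
		for i := 0; i < 10; i++ {
			jobs <- i
		}
		close(jobs)
	}()

	// Collector: the only goroutine printing; signals when output is drained.
	outputDone := make(chan bool)
	go func() {
		for out := range output {
			fmt.Println(out)
		}
		outputDone <- true
	}()

	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := range jobs {
				output <- fmt.Sprintf("done %d", j)
			}
		}()
	}

	wg.Wait()      // all workers finished sending
	close(output)  // lets the collector's range terminate
	<-outputDone   // everything has been printed before we return
}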
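
The wait_for_file change in test/testhelpers.sh swaps the fixed 0.5s poll for a delay that doubles from 1s and is capped at 4s, roughly 60 seconds in total across 17 attempts. The same logic sketched in Go for reference (hypothetical helper, not part of the patch):

package example

import (
	"os"
	"time"
)

// waitForFile polls until path exists and is non-empty (like `[ -s ... ]`),
// sleeping 1s, 2s, 4s, 4s, ... for up to 17 attempts (about 60s overall).
func waitForFile(path string) bool {
	wait := time.Second
	for n := 0; n < 17; n++ {
		if fi, err := os.Stat(path); err == nil && fi.Size() > 0 {
			return true
		}
		time.Sleep(wait)
		if wait < 4*time.Second {
			wait *= 2
		}
	}
	return false
}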