Merge branch 'master' into suggest-double-quotes

This commit is contained in:
risk danger olson 2017-06-14 10:05:32 -06:00 committed by GitHub
commit 9ded63f7ec
36 changed files with 605 additions and 112 deletions

@ -105,7 +105,7 @@ func fetchCommand(cmd *cobra.Command, args []string) {
if !success {
c := getAPIClient()
e := c.Endpoints.Endpoint("download", cfg.CurrentRemote)
Exit("error: failed to push some objects to '%s'", e.Url)
Exit("error: failed to fetch some objects from '%s'", e.Url)
}
}

@ -13,6 +13,7 @@ import (
"github.com/git-lfs/git-lfs/localstorage"
"github.com/git-lfs/git-lfs/progress"
"github.com/git-lfs/git-lfs/tools"
"github.com/git-lfs/git-lfs/tools/humanize"
"github.com/git-lfs/git-lfs/tq"
"github.com/rubyist/tracerx"
"github.com/spf13/cobra"
@ -142,7 +143,7 @@ func prune(fetchPruneConfig config.FetchPruneConfig, verifyRemote, dryRun, verbo
totalSize += file.Size
if verbose {
// Save up verbose output for the end, spinner still going
verboseOutput.WriteString(fmt.Sprintf(" * %v (%v)\n", file.Oid, humanizeBytes(file.Size)))
verboseOutput.WriteString(fmt.Sprintf(" * %v (%v)\n", file.Oid, humanize.FormatBytes(uint64(file.Size))))
}
if verifyRemote {
@ -171,12 +172,12 @@ func prune(fetchPruneConfig config.FetchPruneConfig, verifyRemote, dryRun, verbo
return
}
if dryRun {
Print("%d files would be pruned (%v)", len(prunableObjects), humanizeBytes(totalSize))
Print("%d files would be pruned (%v)", len(prunableObjects), humanize.FormatBytes(uint64(totalSize)))
if verbose {
Print(verboseOutput.String())
}
} else {
Print("Pruning %d files, (%v)", len(prunableObjects), humanizeBytes(totalSize))
Print("Pruning %d files, (%v)", len(prunableObjects), humanize.FormatBytes(uint64(totalSize)))
if verbose {
Print(verboseOutput.String())
}
@ -461,26 +462,6 @@ func pruneTaskGetReachableObjects(gitscanner *lfs.GitScanner, outObjectSet *tool
}
}
var byteUnits = []string{"B", "KB", "MB", "GB", "TB"}
func humanizeBytes(bytes int64) string {
var output string
size := float64(bytes)
if bytes < 1024 {
return fmt.Sprintf("%d B", bytes)
}
for _, unit := range byteUnits {
if size < 1024.0 {
output = fmt.Sprintf("%3.1f %s", size, unit)
break
}
size /= 1024.0
}
return output
}
func init() {
RegisterCommand("prune", pruneCommand, func(cmd *cobra.Command) {
cmd.Flags().BoolVarP(&pruneDryRunArg, "dry-run", "d", false, "Don't delete anything, just report")
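Note: the humanizeBytes helper deleted above is superseded by the new tools/humanize package introduced later in this diff. A minimal sketch of the replacement's behavior, consistent with the humanize tests below:

    humanize.FormatBytes(uint64(1300)) // "1.3 KB" (decimal units, nearest tenth)
    humanize.FormatBytes(uint64(5))    // "5 B"    (values under 10 print verbatim)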

@ -113,7 +113,7 @@ func pull(remote string, filter *filepathfilter.Filter) {
if !success {
c := getAPIClient()
e := c.Endpoints.Endpoint("download", remote)
Exit("error: failed to push some objects to '%s'", e.Url)
Exit("error: failed to fetch some objects from '%s'", e.Url)
}
}

@ -2,6 +2,7 @@ package commands
import (
"crypto/sha256"
"encoding/json"
"fmt"
"io"
"os"
@ -14,7 +15,8 @@ import (
)
var (
porcelain = false
porcelain  = false
statusJson = false
)
func statusCommand(cmd *cobra.Command, args []string) {
@ -31,6 +33,9 @@ func statusCommand(cmd *cobra.Command, args []string) {
if porcelain {
porcelainStagedPointers(scanIndexAt)
return
} else if statusJson {
jsonStagedPointers(scanIndexAt)
return
}
statusScanRefRange(ref)
@ -224,6 +229,43 @@ func statusScanRefRange(ref *git.Ref) {
}
type JSONStatusEntry struct {
Status string `json:"status"`
From string `json:"from,omitempty"`
}
type JSONStatus struct {
Files map[string]JSONStatusEntry `json:"files"`
}
func jsonStagedPointers(ref string) {
staged, unstaged, err := scanIndex(ref)
if err != nil {
ExitWithError(err)
}
status := JSONStatus{Files: make(map[string]JSONStatusEntry)}
for _, entry := range append(unstaged, staged...) {
switch entry.Status {
case lfs.StatusRename, lfs.StatusCopy:
status.Files[entry.DstName] = JSONStatusEntry{
Status: string(entry.Status), From: entry.SrcName,
}
default:
status.Files[entry.SrcName] = JSONStatusEntry{
Status: string(entry.Status),
}
}
}
ret, err := json.Marshal(status)
if err != nil {
ExitWithError(err)
}
Print(string(ret))
}
func porcelainStagedPointers(ref string) {
staged, unstaged, err := scanIndex(ref)
if err != nil {
@ -260,5 +302,6 @@ func porcelainStatusLine(entry *lfs.DiffIndexEntry) string {
func init() {
RegisterCommand("status", statusCommand, func(cmd *cobra.Command) {
cmd.Flags().BoolVarP(&porcelain, "porcelain", "p", false, "Give the output in an easy-to-parse format for scripts.")
cmd.Flags().BoolVarP(&statusJson, "json", "j", false, "Give the output in a stable JSON format for scripts.")
})
}

@ -22,6 +22,8 @@ Display paths of Git LFS objects that
* `--porcelain`:
Give the output in an easy-to-parse format for scripts.
* `--json`:
Give the output in a stable JSON format for scripts.
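For example (mirroring the integration test added in this diff), a tracked file with unstaged modifications might produce:

    $ git lfs status --json
    {"files":{"file1.dat":{"status":"M"}}}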
## SEE ALSO

@ -2,6 +2,7 @@ package errors
import (
"fmt"
"net/url"
"github.com/pkg/errors"
)
@ -134,6 +135,9 @@ func IsRetriableError(err error) bool {
}); ok {
return e.RetriableError()
}
if cause, ok := Cause(err).(*url.Error); ok {
return cause.Temporary() || cause.Timeout()
}
if parent := parentOf(err); parent != nil {
return IsRetriableError(parent)
}
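In practice this covers transport failures from net/http, which wraps them in *url.Error. A minimal sketch, assuming a plain http.Client (the endpoint is hypothetical):

    client := &http.Client{Timeout: time.Millisecond}
    _, err := client.Get("https://example.com/")
    // On a client timeout, err is a *url.Error whose Timeout() method
    // reports true, so errors.IsRetriableError(err) now returns true.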

errors/types_test.go (new file, 46 lines)

@ -0,0 +1,46 @@
package errors_test
import (
"net/url"
"testing"
"github.com/git-lfs/git-lfs/errors"
"github.com/stretchr/testify/assert"
)
type TemporaryError struct {
}
func (e TemporaryError) Error() string {
return ""
}
func (e TemporaryError) Temporary() bool {
return true
}
type TimeoutError struct {
}
func (e TimeoutError) Error() string {
return ""
}
func (e TimeoutError) Timeout() bool {
return true
}
func TestCanRetryOnTemporaryError(t *testing.T) {
err := &url.Error{Err: TemporaryError{}}
assert.True(t, errors.IsRetriableError(err))
}
func TestCanRetryOnTimeoutError(t *testing.T) {
err := &url.Error{Err: TimeoutError{}}
assert.True(t, errors.IsRetriableError(err))
}
func TestCannotRetryOnGenericUrlError(t *testing.T) {
err := &url.Error{Err: errors.New("")}
assert.False(t, errors.IsRetriableError(err))
}

@ -0,0 +1 @@
ref: refs/heads/master

@ -0,0 +1,7 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true

@ -0,0 +1,3 @@
0000000000000000000000000000000000000000 91b85be6928569390e937479509b80a1d0dccb0c Taylor Blau <me@ttaylorr.com> 1496954196 -0600 commit (initial): some.txt: a
91b85be6928569390e937479509b80a1d0dccb0c 228afe30855933151f7a88e70d9d88314fd2f191 Taylor Blau <me@ttaylorr.com> 1496954207 -0600 commit: some.txt: b
228afe30855933151f7a88e70d9d88314fd2f191 d941e4756add6b06f5bee766fcf669f55419f13f Taylor Blau <me@ttaylorr.com> 1496954214 -0600 commit: some.txt: c

@ -0,0 +1,3 @@
0000000000000000000000000000000000000000 91b85be6928569390e937479509b80a1d0dccb0c Taylor Blau <me@ttaylorr.com> 1496954196 -0600 commit (initial): some.txt: a
91b85be6928569390e937479509b80a1d0dccb0c 228afe30855933151f7a88e70d9d88314fd2f191 Taylor Blau <me@ttaylorr.com> 1496954207 -0600 commit: some.txt: b
228afe30855933151f7a88e70d9d88314fd2f191 d941e4756add6b06f5bee766fcf669f55419f13f Taylor Blau <me@ttaylorr.com> 1496954214 -0600 commit: some.txt: c

@ -0,0 +1 @@
d941e4756add6b06f5bee766fcf669f55419f13f

@ -0,0 +1 @@
228afe30855933151f7a88e70d9d88314fd2f191

@ -35,10 +35,12 @@ type Rewriter struct {
// RewriteOptions is an options type given to the Rewrite() function.
type RewriteOptions struct {
// Left is the starting commit.
Left string
// Right is the ending commit.
Right string
// Include is the list of refs; commits reachable from any of them
// will be included.
Include []string
// Exclude is the list of refs; commits reachable from any of them
// will be excluded.
Exclude []string
// BlobFn specifies a function to rewrite blobs.
//
@ -142,9 +144,8 @@ func NewRewriter(db *odb.ObjectDatabase, opts ...rewriterOption) *Rewriter {
// Rewrite rewrites the range of commits given by *RewriteOptions.{Include,Exclude}
// using the BlobRewriteFn to rewrite the individual blobs.
func (r *Rewriter) Rewrite(opt *RewriteOptions) ([]byte, error) {
// First, construct a scanner to iterate through the range of commits to
// rewrite.
scanner, err := git.NewRevListScanner(opt.Left, opt.Right, r.scannerOpts())
// First, obtain a list of commits to rewrite.
commits, err := r.commitsToMigrate(opt)
if err != nil {
return nil, err
}
@ -152,10 +153,10 @@ func (r *Rewriter) Rewrite(opt *RewriteOptions) ([]byte, error) {
// Keep track of the last commit that we rewrote. Callers often want
// this so that they can perform a git-update-ref(1).
var tip []byte
for scanner.Scan() {
for _, oid := range commits {
// Load the original commit to access the data necessary in
// order to rewrite it.
original, err := r.db.Commit(scanner.OID())
original, err := r.db.Commit(oid)
if err != nil {
return nil, err
}
@ -172,11 +173,24 @@ func (r *Rewriter) Rewrite(opt *RewriteOptions) ([]byte, error) {
//
// This operation is safe since we are visiting the commits in
// reverse topological order and therefore have seen all parents
// before children (in other words, r.uncacheCommit(parent) will
// always return a value).
// before children (in other words, r.uncacheCommit(...) will
// always return a value, if the prospective parent is a part of
// the migration).
rewrittenParents := make([][]byte, 0, len(original.ParentIDs))
for _, parent := range original.ParentIDs {
rewrittenParents = append(rewrittenParents, r.uncacheCommit(parent))
for _, originalParent := range original.ParentIDs {
rewrittenParent, ok := r.uncacheCommit(originalParent)
if !ok {
// If we haven't seen the parent before, this
// means that we're doing a partial migration
// and the parent that we're looking for isn't
// included.
//
// Use the original parent to properly link
// history across the migration boundary.
rewrittenParent = originalParent
}
rewrittenParents = append(rewrittenParents, rewrittenParent)
}
// Construct a new commit using the original header information,
@ -196,15 +210,11 @@ func (r *Rewriter) Rewrite(opt *RewriteOptions) ([]byte, error) {
// Cache that commit so that we can reassign children of this
// commit.
r.cacheCommit(scanner.OID(), rewrittenCommit)
r.cacheCommit(oid, rewrittenCommit)
// Move the tip forward.
tip = rewrittenCommit
}
if err = scanner.Err(); err != nil {
return nil, err
}
return tip, err
}
@ -304,6 +314,32 @@ func (r *Rewriter) rewriteBlob(from []byte, path string, fn BlobRewriteFn) ([]by
return sha, nil
}
// commitsToMigrate returns an in-memory copy of a list of commits according to
// the output of git-rev-list(1) (given the *RewriteOptions), where each
// outputted commit is 20 bytes of raw SHA1.
//
// If any error was encountered, it will be returned.
func (r *Rewriter) commitsToMigrate(opt *RewriteOptions) ([][]byte, error) {
scanner, err := git.NewRevListScanner(
opt.Include, opt.Exclude, r.scannerOpts())
if err != nil {
return nil, err
}
var commits [][]byte
for scanner.Scan() {
commits = append(commits, scanner.OID())
}
if err = scanner.Err(); err != nil {
return nil, err
}
if err = scanner.Close(); err != nil {
return nil, err
}
return commits, nil
}
// scannerOpts returns a *git.ScanRefsOptions instance to be given to the
// *git.RevListScanner.
//
@ -364,10 +400,12 @@ func (r *Rewriter) cacheCommit(from, to []byte) {
// uncacheCommit returns the rewritten commit SHA that is cached for the
// given original commit SHA "from". That is to say, it returns the SHA that
// "from" should be rewritten to, or nil if none could be found.
func (r *Rewriter) uncacheCommit(from []byte) []byte {
// "from" should be rewritten to and true, or nil and false if none could be
// found.
func (r *Rewriter) uncacheCommit(from []byte) ([]byte, bool) {
r.mu.Lock()
defer r.mu.Unlock()
return r.commits[hex.EncodeToString(from)]
c, ok := r.commits[hex.EncodeToString(from)]
return c, ok
}

@ -19,7 +19,7 @@ func TestRewriterRewritesHistory(t *testing.T) {
db := DatabaseFromFixture(t, "linear-history.git")
r := NewRewriter(db)
tip, err := r.Rewrite(&RewriteOptions{Left: "master",
tip, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
contents, err := ioutil.ReadAll(b.Contents)
if err != nil {
@ -80,7 +80,7 @@ func TestRewriterRewritesOctopusMerges(t *testing.T) {
db := DatabaseFromFixture(t, "octopus-merge.git")
r := NewRewriter(db)
tip, err := r.Rewrite(&RewriteOptions{Left: "master",
tip, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
return &odb.Blob{
Contents: io.MultiReader(b.Contents, strings.NewReader("_new")),
@ -127,7 +127,7 @@ func TestRewriterDoesntVisitUnchangedSubtrees(t *testing.T) {
seen := make(map[string]int)
_, err := r.Rewrite(&RewriteOptions{Left: "master",
_, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
seen[path] = seen[path] + 1
@ -145,7 +145,7 @@ func TestRewriterVisitsUniqueEntriesWithIdenticalContents(t *testing.T) {
db := DatabaseFromFixture(t, "identical-blobs.git")
r := NewRewriter(db)
tip, err := r.Rewrite(&RewriteOptions{Left: "master",
tip, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
if path == root("b.txt") {
return b, nil
@ -185,7 +185,7 @@ func TestRewriterIgnoresPathsThatDontMatchFilter(t *testing.T) {
seen := make(map[string]int)
_, err := r.Rewrite(&RewriteOptions{Left: "master",
_, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
seen[path] = seen[path] + 1
@ -208,7 +208,7 @@ func TestRewriterAllowsAdditionalTreeEntries(t *testing.T) {
})
assert.Nil(t, err)
tip, err := r.Rewrite(&RewriteOptions{Left: "master",
tip, err := r.Rewrite(&RewriteOptions{Include: []string{"refs/heads/master"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
return b, nil
},
@ -266,6 +266,39 @@ func TestRewriterAllowsAdditionalTreeEntries(t *testing.T) {
AssertBlobContents(t, db, tree3, "extra.txt", "extra\n")
}
func TestHistoryRewriterUseOriginalParentsForPartialMigration(t *testing.T) {
db := DatabaseFromFixture(t, "linear-history-with-tags.git")
r := NewRewriter(db)
tip, err := r.Rewrite(&RewriteOptions{
Include: []string{"refs/heads/master"},
Exclude: []string{"refs/tags/middle"},
BlobFn: func(path string, b *odb.Blob) (*odb.Blob, error) {
return b, nil
},
})
// After rewriting, the rewriter should have only modified the latest
// commit (HEAD), and excluded the first two, both reachable by
// refs/tags/middle.
//
// This should modify one commit, and appropriately link the parent as
// follows:
//
// tree 20ecedad3e74a113695fe5f00ab003694e2e1e9c
// parent 228afe30855933151f7a88e70d9d88314fd2f191
// author Taylor Blau <me@ttaylorr.com> 1496954214 -0600
// committer Taylor Blau <me@ttaylorr.com> 1496954214 -0600
//
// some.txt: c
expectedParent := "228afe30855933151f7a88e70d9d88314fd2f191"
assert.NoError(t, err)
AssertCommitParent(t, db, hex.EncodeToString(tip), expectedParent)
}
func root(path string) string {
if !strings.HasPrefix(path, string(os.PathSeparator)) {
path = string(os.PathSeparator) + path

@ -104,13 +104,11 @@ func (c *Commit) Decode(from io.Reader, size int64) (n int, err error) {
continue
}
fields := strings.Fields(text)
if len(fields) > 0 {
if fields := strings.Fields(text); len(fields) > 0 && !finishedHeaders {
switch fields[0] {
case "tree":
id, err := hex.DecodeString(fields[1])
if err != nil {
panic(1)
return n, err
}
c.TreeID = id
@ -125,15 +123,13 @@ func (c *Commit) Decode(from io.Reader, size int64) (n int, err error) {
case "committer":
c.Committer = strings.Join(fields[1:], " ")
default:
if finishedHeaders {
messageParts = append(messageParts, s.Text())
} else {
c.ExtraHeaders = append(c.ExtraHeaders, &ExtraHeader{
K: fields[0],
V: strings.Join(fields[1:], " "),
})
}
c.ExtraHeaders = append(c.ExtraHeaders, &ExtraHeader{
K: fields[0],
V: strings.Join(fields[1:], " "),
})
}
} else {
messageParts = append(messageParts, s.Text())
}
}
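The rewritten loop stops header parsing for good once the blank separator line has been seen, so a message line beginning with a header keyword is no longer fed back into the header switch. A hypothetical raw commit illustrating the previously-broken input (the case exercised by the new test below):

    tree 6161616161616161616161616161616161616161
    author John Doe <john@example.com> ...
    committer Jane Doe <jane@example.com> ...

    tree <- initial commit

With the old code, the final line re-entered the "tree" case and its invalid SHA tripped the (also-removed) panic; it is now kept as the commit message.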

@ -83,6 +83,33 @@ func TestCommitDecoding(t *testing.T) {
assert.Equal(t, "initial commit", commit.Message)
}
func TestCommitDecodingWithMessageKeywordPrefix(t *testing.T) {
author := &Signature{Name: "John Doe", Email: "john@example.com", When: time.Now()}
committer := &Signature{Name: "Jane Doe", Email: "jane@example.com", When: time.Now()}
treeId := []byte("aaaaaaaaaaaaaaaaaaaa")
treeIdAscii := hex.EncodeToString(treeId)
from := new(bytes.Buffer)
fmt.Fprintf(from, "author %s\n", author)
fmt.Fprintf(from, "committer %s\n", committer)
fmt.Fprintf(from, "tree %s\n", hex.EncodeToString(treeId))
fmt.Fprintf(from, "\ntree <- initial commit\n")
flen := from.Len()
commit := new(Commit)
n, err := commit.Decode(from, int64(flen))
assert.NoError(t, err)
assert.Equal(t, flen, n)
assert.Equal(t, author.String(), commit.Author)
assert.Equal(t, committer.String(), commit.Committer)
assert.Equal(t, treeIdAscii, hex.EncodeToString(commit.TreeID))
assert.Equal(t, "tree <- initial commit", commit.Message)
}
func assertLine(t *testing.T, buf *bytes.Buffer, wanted string, args ...interface{}) {
got, err := buf.ReadString('\n')

@ -50,7 +50,7 @@ func (t *Tree) Decode(from io.Reader, size int64) (n int, err error) {
fname = strings.TrimSuffix(fname, "\x00")
var sha [20]byte
if _, err = buf.Read(sha[:]); err != nil {
if _, err = io.ReadFull(buf, sha[:]); err != nil {
return n, err
}
n += 20
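This guards against short reads: Read may return fewer than len(p) bytes with a nil error (for example at a bufio.Reader buffer boundary, the case exercised by the new SHA-boundary test below), whereas io.ReadFull keeps reading until the slice is full or fails with io.ErrUnexpectedEOF. A minimal sketch of the distinction (not part of this diff):

    var sha [20]byte
    // Read may legally return n < 20 with err == nil:
    n, _ := buf.Read(sha[:])
    // ReadFull returns only after filling sha, or reports an error:
    if _, err := io.ReadFull(buf, sha[:]); err != nil {
        return n, err
    }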

@ -1,6 +1,7 @@
package odb
import (
"bufio"
"bytes"
"fmt"
"strconv"
@ -101,6 +102,30 @@ func TestTreeDecoding(t *testing.T) {
}, tree.Entries[3])
}
func TestTreeDecodingShaBoundary(t *testing.T) {
var from bytes.Buffer
fmt.Fprintf(&from, "%s %s\x00%s",
strconv.FormatInt(int64(0100644), 8),
"a.dat", []byte("aaaaaaaaaaaaaaaaaaaa"))
flen := from.Len()
tree := new(Tree)
n, err := tree.Decode(bufio.NewReaderSize(&from, flen-2), int64(flen))
assert.Nil(t, err)
assert.Equal(t, flen, n)
require.Len(t, tree.Entries, 1)
assert.Equal(t, &TreeEntry{
Name: "a.dat",
Type: BlobObjectType,
Oid: []byte("aaaaaaaaaaaaaaaaaaaa"),
Filemode: 0100644,
}, tree.Entries[0])
}
func assertTreeEntry(t *testing.T, buf *bytes.Buffer,
name string, typ ObjectType, oid []byte, mode int32) {

@ -24,8 +24,8 @@ const (
ScanRefsMode ScanningMode = iota
// ScanAllMode will scan all history.
ScanAllMode
// ScanLeftToRemoteMode will scan the difference between "left" and a
// remote tracking ref.
// ScanLeftToRemoteMode will scan the difference between any included
// SHA1s and a remote tracking ref.
ScanLeftToRemoteMode
)
@ -160,15 +160,15 @@ var (
z40 = regexp.MustCompile(`\^?0{40}`)
)
// NewRevListScanner instantiates a new RevListScanner instance scanning between
// the "left" and "right" commitish (commit, refspec) and scanning using the
// *ScanRefsOptions "opt" configuration.
// NewRevListScanner instantiates a new RevListScanner instance scanning all
// revisions reachable by refs contained in "include" and not reachable by
// any refs contained in "excluded", using the *ScanRefsOptions "opt"
// configuration.
//
// It returns a new *RevListScanner instance, or an error if one was
// encountered. Upon returning, the `git-rev-list(1)` instance is already
// running, and Scan() may be called immediately.
func NewRevListScanner(left, right string, opt *ScanRefsOptions) (*RevListScanner, error) {
stdin, args, err := revListArgs(left, right, opt)
func NewRevListScanner(include, excluded []string, opt *ScanRefsOptions) (*RevListScanner, error) {
stdin, args, err := revListArgs(include, excluded, opt)
if err != nil {
return nil, err
}
@ -218,13 +218,13 @@ func NewRevListScanner(left, right string, opt *ScanRefsOptions) (*RevListScanne
}, nil
}
// revListArgs returns the arguments for a given left, right, and
// ScanRefsOptions instance.
// revListArgs returns the arguments for a given included and excluded set of
// SHA1s, and ScanRefsOptions instance.
//
// In order, it returns the contents of stdin as an io.Reader, the args passed
// to git as a []string, and any error encountered in generating those if one
// occurred.
func revListArgs(l, r string, opt *ScanRefsOptions) (io.Reader, []string, error) {
func revListArgs(include, exclude []string, opt *ScanRefsOptions) (io.Reader, []string, error) {
var stdin io.Reader
args := []string{"rev-list"}
if !opt.CommitsOnly {
@ -247,19 +247,17 @@ func revListArgs(l, r string, opt *ScanRefsOptions) (io.Reader, []string, error)
args = append(args, "--do-walk")
}
args = append(args, l)
if len(r) > 0 && !z40.MatchString(r) {
args = append(args, r)
}
args = append(args, includeExcludeShas(include, exclude)...)
case ScanAllMode:
args = append(args, "--all")
case ScanLeftToRemoteMode:
if len(opt.SkippedRefs) == 0 {
args = append(args, l, "--not", "--remotes="+opt.Remote)
args = append(args, includeExcludeShas(include, exclude)...)
args = append(args, "--not", "--remotes="+opt.Remote)
} else {
args = append(args, "--stdin")
stdin = strings.NewReader(strings.Join(
append([]string{l}, opt.SkippedRefs...), "\n"),
append(includeExcludeShas(include, exclude), opt.SkippedRefs...), "\n"),
)
}
default:
@ -268,6 +266,34 @@ func revListArgs(l, r string, opt *ScanRefsOptions) (io.Reader, []string, error)
return stdin, append(args, "--"), nil
}
func includeExcludeShas(include, exclude []string) []string {
include = nonZeroShas(include)
exclude = nonZeroShas(exclude)
args := make([]string, 0, len(include)+len(exclude))
for _, i := range include {
args = append(args, i)
}
for _, x := range exclude {
args = append(args, fmt.Sprintf("^%s", x))
}
return args
}
func nonZeroShas(all []string) []string {
nz := make([]string, 0, len(all))
for _, sha := range all {
if len(sha) > 0 && !z40.MatchString(sha) {
nz = append(nz, sha)
}
}
return nz
}
// Name is an optional field that gives the name of the object (if the object is
// a tree, blob).
//
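As a worked example (using the s1/s2 fixtures from the tests below), includeExcludeShas prefixes each exclusion with "^", matching git-rev-list(1)'s negation syntax:

    includeExcludeShas(
        []string{"decafdecafdecafdecafdecafdecafdecafdecaf"},
        []string{"cafecafecafecafecafecafecafecafecafecafe"},
    )
    // => ["decafdecaf...", "^cafecafe..."], which yields, for example:
    //    git rev-list --objects --do-walk decaf... ^cafe... --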

@ -14,9 +14,9 @@ import (
)
type ArgsTestCase struct {
Left string
Right string
Opt *ScanRefsOptions
Include []string
Exclude []string
Opt *ScanRefsOptions
ExpectedStdin string
ExpectedArgs []string
@ -24,7 +24,7 @@ type ArgsTestCase struct {
}
func (c *ArgsTestCase) Assert(t *testing.T) {
stdin, args, err := revListArgs(c.Left, c.Right, c.Opt)
stdin, args, err := revListArgs(c.Include, c.Exclude, c.Opt)
if len(c.ExpectedErr) > 0 {
assert.EqualError(t, err, c.ExpectedErr)
@ -48,99 +48,104 @@ func (c *ArgsTestCase) Assert(t *testing.T) {
}
}
var (
s1 = "decafdecafdecafdecafdecafdecafdecafdecaf"
s2 = "cafecafecafecafecafecafecafecafecafecafe"
)
func TestRevListArgs(t *testing.T) {
for desc, c := range map[string]*ArgsTestCase{
"scan refs deleted, left and right": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
SkipDeletedBlobs: false,
},
ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", s1, "^" + s2, "--"},
},
"scan refs not deleted, left and right": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
SkipDeletedBlobs: true,
},
ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", s1, "^" + s2, "--"},
},
"scan refs deleted, left only": {
Left: "left", Right: "", Opt: &ScanRefsOptions{
Include: []string{s1}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
SkipDeletedBlobs: false,
},
ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", "left", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--do-walk", s1, "--"},
},
"scan refs not deleted, left only": {
Left: "left", Right: "", Opt: &ScanRefsOptions{
Include: []string{s1}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
SkipDeletedBlobs: true,
},
ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", "left", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--no-walk", s1, "--"},
},
"scan all": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanAllMode,
},
ExpectedArgs: []string{"rev-list", "--objects", "--all", "--"},
},
"scan left to remote, no skipped refs": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Opt: &ScanRefsOptions{
Mode: ScanLeftToRemoteMode,
Remote: "origin",
SkippedRefs: []string{},
},
ExpectedArgs: []string{"rev-list", "--objects", "left", "--not", "--remotes=origin", "--"},
ExpectedArgs: []string{"rev-list", "--objects", s1, "--not", "--remotes=origin", "--"},
},
"scan left to remote, skipped refs": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanLeftToRemoteMode,
Remote: "origin",
SkippedRefs: []string{"a", "b", "c"},
},
ExpectedArgs: []string{"rev-list", "--objects", "--stdin", "--"},
ExpectedStdin: "left\na\nb\nc",
ExpectedStdin: s1 + "\n^" + s2 + "\na\nb\nc",
},
"scan unknown type": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanningMode(-1),
},
ExpectedErr: "unknown scan type: -1",
},
"scan date order": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
Order: DateRevListOrder,
},
ExpectedArgs: []string{"rev-list", "--objects", "--date-order", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--date-order", "--do-walk", s1, "^" + s2, "--"},
},
"scan author date order": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
Order: AuthorDateRevListOrder,
},
ExpectedArgs: []string{"rev-list", "--objects", "--author-date-order", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--author-date-order", "--do-walk", s1, "^" + s2, "--"},
},
"scan topo order": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
Order: TopoRevListOrder,
},
ExpectedArgs: []string{"rev-list", "--objects", "--topo-order", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--topo-order", "--do-walk", s1, "^" + s2, "--"},
},
"scan commits only": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
CommitsOnly: true,
},
ExpectedArgs: []string{"rev-list", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--do-walk", s1, "^" + s2, "--"},
},
"scan reverse": {
Left: "left", Right: "right", Opt: &ScanRefsOptions{
Include: []string{s1}, Exclude: []string{s2}, Opt: &ScanRefsOptions{
Mode: ScanRefsMode,
Reverse: true,
},
ExpectedArgs: []string{"rev-list", "--objects", "--reverse", "--do-walk", "left", "right", "--"},
ExpectedArgs: []string{"rev-list", "--objects", "--reverse", "--do-walk", s1, "^" + s2, "--"},
},
} {
t.Run(desc, c.Assert)

@ -41,7 +41,7 @@ func scanRefsToChan(scanner *GitScanner, pointerCb GitScannerFoundPointer, refLe
panic("no scan ref options")
}
revs, err := revListShas(refLeft, refRight, opt)
revs, err := revListShas([]string{refLeft, refRight}, nil, opt)
if err != nil {
return err
}
@ -89,8 +89,8 @@ func scanRefsToChan(scanner *GitScanner, pointerCb GitScannerFoundPointer, refLe
// revListShas uses git rev-list to return the list of object sha1s
// reachable from the "include" refs and not reachable from the "exclude"
// refs. It returns a channel from which sha1 strings can be read.
func revListShas(refLeft, refRight string, opt *ScanRefsOptions) (*StringChannelWrapper, error) {
scanner, err := git.NewRevListScanner(refLeft, refRight, &git.ScanRefsOptions{
func revListShas(include, exclude []string, opt *ScanRefsOptions) (*StringChannelWrapper, error) {
scanner, err := git.NewRevListScanner(include, exclude, &git.ScanRefsOptions{
Mode: git.ScanningMode(opt.ScanMode),
Remote: opt.RemoteName,
SkipDeletedBlobs: opt.SkipDeletedBlobs,

@ -85,6 +85,32 @@ A file2.dat"
)
end_test
begin_test "status --json"
(
set -e
mkdir repo-3
cd repo-3
git init
git lfs track "*.dat"
echo "some data" > file1.dat
git add file1.dat
git commit -m "file1.dat"
echo "other data" > file1.dat
expected='{"files":{"file1.dat":{"status":"M"}}}'
[ "$expected" = "$(git lfs status --json)" ]
git add file1.dat
git commit -m "file1.dat changed"
git mv file1.dat file2.dat
expected='{"files":{"file2.dat":{"status":"R","from":"file1.dat"}}}'
[ "$expected" = "$(git lfs status --json)" ]
)
end_test
begin_test "status: outside git repository"
(

@ -0,0 +1,97 @@
package humanize
import (
"fmt"
"math"
"strconv"
"strings"
"unicode"
"github.com/git-lfs/git-lfs/errors"
)
const (
Byte = 1 << (iota * 10)
Kibibyte
Mebibyte
Gibibyte
Tebibyte
Pebibyte
Kilobyte = 1000 * Byte
Megabyte = 1000 * Kilobyte
Gigabyte = 1000 * Megabyte
Terabyte = 1000 * Gigabyte
Petabyte = 1000 * Terabyte
)
var bytesTable = map[string]uint64{
"b": Byte,
"kib": Kibibyte,
"mib": Mebibyte,
"gib": Gibibyte,
"tib": Tebibyte,
"pib": Pebibyte,
"kb": Kilobyte,
"mb": Megabyte,
"gb": Gigabyte,
"tb": Terabyte,
"pb": Petabyte,
}
// ParseBytes parses a given human-readable size string in either SI ("KB")
// or IEC ("KiB") units into a number of bytes, or an error if the string
// could not be parsed.
func ParseBytes(str string) (uint64, error) {
var sep int
for _, r := range str {
if !(unicode.IsDigit(r) || r == '.' || r == ',') {
break
}
sep = sep + 1
}
f, err := strconv.ParseFloat(strings.Replace(str[:sep], ",", "", -1), 64)
if err != nil {
return 0, err
}
unit := strings.ToLower(strings.TrimSpace(str[sep:]))
if m, ok := bytesTable[unit]; ok {
f = f * float64(m)
if f >= math.MaxUint64 {
return 0, errors.New("number of bytes too large")
}
return uint64(f), nil
}
return 0, errors.Errorf("unknown unit: %q", unit)
}
var sizes = []string{"B", "KB", "MB", "GB", "TB", "PB"}
// FormatBytes outputs the given number of bytes "s" as a human-readable
// string, rounded to the nearest tenth of the largest applicable unit.
func FormatBytes(s uint64) string {
if s < 10 {
return fmt.Sprintf("%d B", s)
}
e := math.Floor(log(float64(s), 1000))
suffix := sizes[int(e)]
val := math.Floor(float64(s)/math.Pow(1000, e)*10+.5) / 10
f := "%.0f %s"
if val < 10 {
f = "%.1f %s"
}
return fmt.Sprintf(f, val, suffix)
}
// log takes the log base "b" of "n" (\log_b{n})
func log(n, b float64) float64 {
return math.Log(n) / math.Log(b)
}
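A short usage sketch, consistent with the tests below (standalone program for illustration). For FormatBytes(1490): e = floor(log_1000(1490)) = 1 and val = floor(1.49*10+.5)/10 = 1.5, giving "1.5 KB".

    package main

    import (
        "fmt"

        "github.com/git-lfs/git-lfs/tools/humanize"
    )

    func main() {
        n, _ := humanize.ParseBytes("20 KiB")   // IEC unit: 20 * 1024
        fmt.Println(n)                          // 20480
        fmt.Println(humanize.FormatBytes(1490)) // "1.5 KB"
    }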

@ -0,0 +1,123 @@
package humanize_test
import (
"math"
"testing"
"github.com/git-lfs/git-lfs/tools/humanize"
"github.com/stretchr/testify/assert"
)
type ParseBytesTestCase struct {
Given string
Expected uint64
Err error
}
func (c *ParseBytesTestCase) Assert(t *testing.T) {
got, err := humanize.ParseBytes(c.Given)
if c.Err == nil {
assert.NoError(t, err, "unexpected error: %s", err)
assert.EqualValues(t, c.Expected, got)
} else {
assert.Equal(t, c.Err, err)
}
}
type FormatBytesTestCase struct {
Given uint64
Expected string
}
func (c *FormatBytesTestCase) Assert(t *testing.T) {
assert.Equal(t, c.Expected, humanize.FormatBytes(c.Given))
}
func TestParseBytes(t *testing.T) {
for desc, c := range map[string]*ParseBytesTestCase{
"parse byte": {"10B", uint64(10 * math.Pow(2, 0)), nil},
"parse kibibyte": {"20KIB", uint64(20 * math.Pow(2, 10)), nil},
"parse mebibyte": {"30MIB", uint64(30 * math.Pow(2, 20)), nil},
"parse gibibyte": {"40GIB", uint64(40 * math.Pow(2, 30)), nil},
"parse tebibyte": {"50TIB", uint64(50 * math.Pow(2, 40)), nil},
"parse pebibyte": {"60PIB", uint64(60 * math.Pow(2, 50)), nil},
"parse byte (lowercase)": {"10b", uint64(10 * math.Pow(2, 0)), nil},
"parse kibibyte (lowercase)": {"20kib", uint64(20 * math.Pow(2, 10)), nil},
"parse mebibyte (lowercase)": {"30mib", uint64(30 * math.Pow(2, 20)), nil},
"parse gibibyte (lowercase)": {"40gib", uint64(40 * math.Pow(2, 30)), nil},
"parse tebibyte (lowercase)": {"50tib", uint64(50 * math.Pow(2, 40)), nil},
"parse pebibyte (lowercase)": {"60pib", uint64(60 * math.Pow(2, 50)), nil},
"parse byte (with space)": {"10 B", uint64(10 * math.Pow(2, 0)), nil},
"parse kibibyte (with space)": {"20 KIB", uint64(20 * math.Pow(2, 10)), nil},
"parse mebibyte (with space)": {"30 MIB", uint64(30 * math.Pow(2, 20)), nil},
"parse gibibyte (with space)": {"40 GIB", uint64(40 * math.Pow(2, 30)), nil},
"parse tebibyte (with space)": {"50 TIB", uint64(50 * math.Pow(2, 40)), nil},
"parse pebibyte (with space)": {"60 PIB", uint64(60 * math.Pow(2, 50)), nil},
"parse byte (with space, lowercase)": {"10 b", uint64(10 * math.Pow(2, 0)), nil},
"parse kibibyte (with space, lowercase)": {"20 kib", uint64(20 * math.Pow(2, 10)), nil},
"parse mebibyte (with space, lowercase)": {"30 mib", uint64(30 * math.Pow(2, 20)), nil},
"parse gibibyte (with space, lowercase)": {"40 gib", uint64(40 * math.Pow(2, 30)), nil},
"parse tebibyte (with space, lowercase)": {"50 tib", uint64(50 * math.Pow(2, 40)), nil},
"parse pebibyte (with space, lowercase)": {"60 pib", uint64(60 * math.Pow(2, 50)), nil},
"parse kilobyte": {"20KB", uint64(20 * math.Pow(10, 3)), nil},
"parse megabyte": {"30MB", uint64(30 * math.Pow(10, 6)), nil},
"parse gigabyte": {"40GB", uint64(40 * math.Pow(10, 9)), nil},
"parse terabyte": {"50TB", uint64(50 * math.Pow(10, 12)), nil},
"parse petabyte": {"60PB", uint64(60 * math.Pow(10, 15)), nil},
"parse kilobyte (lowercase)": {"20kb", uint64(20 * math.Pow(10, 3)), nil},
"parse megabyte (lowercase)": {"30mb", uint64(30 * math.Pow(10, 6)), nil},
"parse gigabyte (lowercase)": {"40gb", uint64(40 * math.Pow(10, 9)), nil},
"parse terabyte (lowercase)": {"50tb", uint64(50 * math.Pow(10, 12)), nil},
"parse petabyte (lowercase)": {"60pb", uint64(60 * math.Pow(10, 15)), nil},
"parse kilobyte (with space)": {"20 KB", uint64(20 * math.Pow(10, 3)), nil},
"parse megabyte (with space)": {"30 MB", uint64(30 * math.Pow(10, 6)), nil},
"parse gigabyte (with space)": {"40 GB", uint64(40 * math.Pow(10, 9)), nil},
"parse terabyte (with space)": {"50 TB", uint64(50 * math.Pow(10, 12)), nil},
"parse petabyte (with space)": {"60 PB", uint64(60 * math.Pow(10, 15)), nil},
"parse kilobyte (with space, lowercase)": {"20 kb", uint64(20 * math.Pow(10, 3)), nil},
"parse megabyte (with space, lowercase)": {"30 mb", uint64(30 * math.Pow(10, 6)), nil},
"parse gigabyte (with space, lowercase)": {"40 gb", uint64(40 * math.Pow(10, 9)), nil},
"parse terabyte (with space, lowercase)": {"50 tb", uint64(50 * math.Pow(10, 12)), nil},
"parse petabyte (with space, lowercase)": {"60 pb", uint64(60 * math.Pow(10, 15)), nil},
} {
t.Run(desc, c.Assert)
}
}
func TestFormatBytes(t *testing.T) {
for desc, c := range map[string]*FormatBytesTestCase{
"format bytes": {uint64(1 * math.Pow(10, 0)), "1 B"},
"format kilobytes": {uint64(1 * math.Pow(10, 3)), "1.0 KB"},
"format megabytes": {uint64(1 * math.Pow(10, 6)), "1.0 MB"},
"format gigabytes": {uint64(1 * math.Pow(10, 9)), "1.0 GB"},
"format petabytes": {uint64(1 * math.Pow(10, 12)), "1.0 TB"},
"format terabytes": {uint64(1 * math.Pow(10, 15)), "1.0 PB"},
"format kilobytes under": {uint64(1.49 * math.Pow(10, 3)), "1.5 KB"},
"format megabytes under": {uint64(1.49 * math.Pow(10, 6)), "1.5 MB"},
"format gigabytes under": {uint64(1.49 * math.Pow(10, 9)), "1.5 GB"},
"format petabytes under": {uint64(1.49 * math.Pow(10, 12)), "1.5 TB"},
"format terabytes under": {uint64(1.49 * math.Pow(10, 15)), "1.5 PB"},
"format kilobytes over": {uint64(1.51 * math.Pow(10, 3)), "1.5 KB"},
"format megabytes over": {uint64(1.51 * math.Pow(10, 6)), "1.5 MB"},
"format gigabytes over": {uint64(1.51 * math.Pow(10, 9)), "1.5 GB"},
"format petabytes over": {uint64(1.51 * math.Pow(10, 12)), "1.5 TB"},
"format terabytes over": {uint64(1.51 * math.Pow(10, 15)), "1.5 PB"},
"format kilobytes exact": {uint64(1.3 * math.Pow(10, 3)), "1.3 KB"},
"format megabytes exact": {uint64(1.3 * math.Pow(10, 6)), "1.3 MB"},
"format gigabytes exact": {uint64(1.3 * math.Pow(10, 9)), "1.3 GB"},
"format petabytes exact": {uint64(1.3 * math.Pow(10, 12)), "1.3 TB"},
"format terabytes exact": {uint64(1.3 * math.Pow(10, 15)), "1.3 PB"},
} {
t.Run(desc, c.Assert)
}
}

@ -0,0 +1,5 @@
// Package humanize is designed to parse and format "humanized" versions of
// numbers with units.
//
// Based on: github.com/dustin/go-humanize.
package humanize