Last active
May 22, 2019 18:55
-
-
Save genuinelucifer/77346b81f79d2fdf3853fec16ef32fc0 to your computer and use it in GitHub Desktop.
This patch adapts https://github.com/caglar10ur/Hound/commit/a9208072a0444cd562592ca1bf348bf816ba4c2a to work on https://github.com/etsy/hound to allow indexing and searching on multiple branches...
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
From 51270ab670ce3b4c7d4987e4f28f9615094bae31 Mon Sep 17 00:00:00 2001 | |
From: Abhinav Tripathi <genuinelucifer@gmail.com> | |
Date: Wed, 3 Apr 2019 15:13:05 +0900 | |
Subject: [PATCH] Add the ability to specify multiple branches and search | |
through those | |
Adapted the patch at https://github.com/caglar10ur/Hound/commit/a9208072a0444cd562592ca1bf348bf816ba4c2a | |
and added the option to specify allbranches search | |
--- | |
api/api.go | 35 ++++++++++++++++++----------- | |
client/client.go | 5 +++-- | |
config/config.go | 3 ++- | |
index/index.go | 1 + | |
searcher/searcher.go | 51 +++++++++++++++++++++++++----------------- | |
ui/assets/js/common.js | 3 ++- | |
ui/assets/js/hound.js | 49 ++++++++++++++++++++++++++++++++++------ | |
vcs/bzr.go | 4 ++-- | |
vcs/git.go | 33 +++++++++++++++++++++++---- | |
vcs/hg.go | 4 ++-- | |
vcs/svn.go | 4 ++-- | |
vcs/vcs.go | 10 ++++----- | |
12 files changed, 143 insertions(+), 59 deletions(-) | |
diff --git a/api/api.go b/api/api.go | |
index 017eec2..a7f424e 100644 | |
--- a/api/api.go | |
+++ b/api/api.go | |
@@ -100,21 +100,27 @@ func parseAsBool(v string) bool { | |
return v == "true" || v == "1" || v == "fosho" | |
} | |
-func parseAsRepoList(v string, idx map[string]*searcher.Searcher) []string { | |
+// If all branches are not selected then only search in master branches | |
+func doSearchInRepo(reposToSearch string, repo string, allb bool) bool { | |
+ if reposToSearch != "*" && !strings.Contains(reposToSearch, repo) { | |
+ return false | |
+ } | |
+ if allb == true { | |
+ return true | |
+ } | |
+ if strings.HasSuffix(repo, "@master") { | |
+ return true | |
+ } | |
+ return false | |
+} | |
+ | |
+func parseAsRepoList(v string, allb bool, idx map[string]*searcher.Searcher) []string { | |
v = strings.TrimSpace(v) | |
var repos []string | |
- if v == "*" { | |
- for repo := range idx { | |
+ for repo := range idx { | |
+ if doSearchInRepo(v, repo, allb) { | |
repos = append(repos, repo) | |
} | |
- return repos | |
- } | |
- | |
- for _, repo := range strings.Split(v, ",") { | |
- if idx[repo] == nil { | |
- continue | |
- } | |
- repos = append(repos, repo) | |
} | |
return repos | |
} | |
@@ -174,11 +180,13 @@ func Setup(m *http.ServeMux, idx map[string]*searcher.Searcher) { | |
var opt index.SearchOptions | |
stats := parseAsBool(r.FormValue("stats")) | |
- repos := parseAsRepoList(r.FormValue("repos"), idx) | |
query := r.FormValue("q") | |
opt.Offset, opt.Limit = parseRangeValue(r.FormValue("rng")) | |
opt.FileRegexp = r.FormValue("files") | |
opt.IgnoreCase = parseAsBool(r.FormValue("i")) | |
+ opt.AllBranches = parseAsBool(r.FormValue("allb")) | |
+ repos := parseAsRepoList(r.FormValue("repos"), opt.AllBranches, idx) | |
+ | |
opt.LinesOfContext = parseAsUintValue( | |
r.FormValue("ctx"), | |
0, | |
@@ -227,7 +235,8 @@ func Setup(m *http.ServeMux, idx map[string]*searcher.Searcher) { | |
return | |
} | |
- repos := parseAsRepoList(r.FormValue("repos"), idx) | |
+ var allBranches = parseAsBool(r.FormValue("allb")) | |
+ repos := parseAsRepoList(r.FormValue("repos"), allBranches, idx) | |
for _, repo := range repos { | |
searcher := idx[repo] | |
diff --git a/client/client.go b/client/client.go | |
index 85bbff8..49c32da 100644 | |
--- a/client/client.go | |
+++ b/client/client.go | |
@@ -3,12 +3,13 @@ package client | |
import ( | |
"encoding/json" | |
"fmt" | |
- "github.com/etsy/hound/config" | |
- "github.com/etsy/hound/index" | |
"net/http" | |
"net/url" | |
"regexp" | |
"strings" | |
+ | |
+ "github.com/etsy/hound/config" | |
+ "github.com/etsy/hound/index" | |
) | |
type Response struct { | |
diff --git a/config/config.go b/config/config.go | |
index 1dce70a..7993be2 100644 | |
--- a/config/config.go | |
+++ b/config/config.go | |
@@ -13,7 +13,7 @@ const ( | |
defaultPushEnabled = false | |
defaultPollEnabled = true | |
defaultVcs = "git" | |
- defaultBaseUrl = "{url}/blob/master/{path}{anchor}" | |
+ defaultBaseUrl = "{url}/blob/{branch}/{path}{anchor}" | |
defaultAnchor = "#L{line}" | |
defaultHealthChekURI = "/healthz" | |
) | |
@@ -25,6 +25,7 @@ type UrlPattern struct { | |
type Repo struct { | |
Url string `json:"url"` | |
+ Branches []string `json:"branches"` | |
MsBetweenPolls int `json:"ms-between-poll"` | |
Vcs string `json:"vcs"` | |
VcsConfigMessage *SecretMessage `json:"vcs-config"` | |
diff --git a/index/index.go b/index/index.go | |
index 8b77369..c39e184 100644 | |
--- a/index/index.go | |
+++ b/index/index.go | |
@@ -46,6 +46,7 @@ type SearchOptions struct { | |
FileRegexp string | |
Offset int | |
Limit int | |
+ AllBranches bool | |
} | |
type Match struct { | |
diff --git a/searcher/searcher.go b/searcher/searcher.go | |
index 4cb706a..05c47d3 100644 | |
--- a/searcher/searcher.go | |
+++ b/searcher/searcher.go | |
@@ -52,7 +52,7 @@ type limiter chan bool | |
*/ | |
type foundRefs struct { | |
refs []*index.IndexRef | |
- claimed map[*index.IndexRef]bool | |
+ claimed *sync.Map | |
} | |
func makeLimiter(n int) limiter { | |
@@ -85,7 +85,7 @@ func (r *foundRefs) find(url, rev string) *index.IndexRef { | |
* collected at the end of startup. | |
*/ | |
func (r *foundRefs) claim(ref *index.IndexRef) { | |
- r.claimed[ref] = true | |
+ r.claimed.Store(ref, true) | |
} | |
/** | |
@@ -94,7 +94,7 @@ func (r *foundRefs) claim(ref *index.IndexRef) { | |
*/ | |
func (r *foundRefs) removeUnclaimed() error { | |
for _, ref := range r.refs { | |
- if r.claimed[ref] { | |
+ if _, ok := r.claimed.Load(ref); ok { | |
continue | |
} | |
@@ -216,7 +216,7 @@ func findExistingRefs(dbpath string) (*foundRefs, error) { | |
return &foundRefs{ | |
refs: refs, | |
- claimed: map[*index.IndexRef]bool{}, | |
+ claimed: new(sync.Map), | |
}, nil | |
} | |
@@ -262,8 +262,8 @@ func hashFor(name string) string { | |
} | |
// Create a normalized name for the vcs directory of this repo. | |
-func vcsDirFor(repo *config.Repo) string { | |
- return fmt.Sprintf("vcs-%s", hashFor(repo.Url)) | |
+func vcsDirFor(repo *config.Repo, branch string) string { | |
+ return fmt.Sprintf("vcs-%s-%s", hashFor(repo.Url), branch) | |
} | |
func init() { | |
@@ -286,14 +286,20 @@ func MakeAll(cfg *config.Config) (map[string]*Searcher, map[string]error, error) | |
lim := makeLimiter(cfg.MaxConcurrentIndexers) | |
- n := len(cfg.Repos) | |
+ n := 0 | |
+ for _, repo := range cfg.Repos { | |
+ n = n + len(repo.Branches) | |
+ } | |
+ | |
// Channel to receive the results from newSearcherConcurrent function. | |
resultCh := make(chan searcherResult, n) | |
// Start new searchers for all repos in different go routines while | |
// respecting cfg.MaxConcurrentIndexers. | |
for name, repo := range cfg.Repos { | |
- go newSearcherConcurrent(cfg.DbPath, name, repo, refs, lim, resultCh) | |
+ for _, branch := range repo.Branches { | |
+ go newSearcherConcurrent(cfg.DbPath, name, repo, branch, refs, lim, resultCh) | |
+ } | |
} | |
// Collect the results on resultCh channel for all repos. | |
@@ -321,8 +327,8 @@ func MakeAll(cfg *config.Config) (map[string]*Searcher, map[string]error, error) | |
// Creates a new Searcher that is available for searches as soon as this returns. | |
// This will pull or clone the target repo and start watching the repo for changes. | |
-func New(dbpath, name string, repo *config.Repo) (*Searcher, error) { | |
- s, err := newSearcher(dbpath, name, repo, &foundRefs{}, makeLimiter(1)) | |
+func New(dbpath, name string, repo *config.Repo, branch string) (*Searcher, error) { | |
+ s, err := newSearcher(dbpath, name, repo, branch, &foundRefs{}, makeLimiter(1)) | |
if err != nil { | |
return nil, err | |
} | |
@@ -337,6 +343,7 @@ func updateAndReindex( | |
s *Searcher, | |
dbpath, | |
vcsDir, | |
+ branch, | |
name, | |
rev string, | |
wd *vcs.WorkDir, | |
@@ -348,10 +355,10 @@ func updateAndReindex( | |
defer lim.Release() | |
repo := s.Repo | |
- newRev, err := wd.PullOrClone(vcsDir, repo.Url) | |
+ newRev, err := wd.PullOrClone(vcsDir, repo.Url, branch) | |
if err != nil { | |
- log.Printf("vcs pull error (%s - %s): %s", name, repo.Url, err) | |
+ log.Printf("vcs pull error (%s - %s - %s): %s", name, branch, repo.Url, err) | |
return rev, false | |
} | |
@@ -388,12 +395,13 @@ func updateAndReindex( | |
func newSearcher( | |
dbpath, name string, | |
repo *config.Repo, | |
+ branch string, | |
refs *foundRefs, | |
lim limiter) (*Searcher, error) { | |
- vcsDir := filepath.Join(dbpath, vcsDirFor(repo)) | |
+ vcsDir := filepath.Join(dbpath, vcsDirFor(repo, branch)) | |
- log.Printf("Searcher started for %s", name) | |
+ log.Printf("Searcher started for %s@%s", name, branch) | |
wd, err := vcs.New(repo.Vcs, repo.VcsConfig()) | |
if err != nil { | |
@@ -405,7 +413,7 @@ func newSearcher( | |
SpecialFiles: wd.SpecialFiles(), | |
} | |
- rev, err := wd.PullOrClone(vcsDir, repo.Url) | |
+ rev, err := wd.PullOrClone(vcsDir, repo.Url, branch) | |
if err != nil { | |
return nil, err | |
} | |
@@ -414,7 +422,9 @@ func newSearcher( | |
ref := refs.find(repo.Url, rev) | |
if ref == nil { | |
idxDir = nextIndexDir(dbpath) | |
+ fmt.Printf("Could not find ref for url: %s and branch: %s\n", repo.Url, branch) | |
} else { | |
+ fmt.Printf("For url: %s and branch: %s found ref with dir: %s\n", repo.Url, branch, ref.Dir()) | |
idxDir = ref.Dir() | |
refs.claim(ref) | |
} | |
@@ -464,7 +474,7 @@ func newSearcher( | |
} | |
// attempt to update and reindex this searcher | |
- newRev, ok := updateAndReindex(s, dbpath, vcsDir, name, rev, wd, opt, lim) | |
+ newRev, ok := updateAndReindex(s, dbpath, vcsDir, branch, name, rev, wd, opt, lim) | |
if !ok { | |
continue | |
} | |
@@ -490,6 +500,7 @@ func newSearcher( | |
func newSearcherConcurrent( | |
dbpath, name string, | |
repo *config.Repo, | |
+ branch string, | |
refs *foundRefs, | |
lim limiter, | |
resultCh chan searcherResult) { | |
@@ -498,17 +509,17 @@ func newSearcherConcurrent( | |
lim.Acquire() | |
defer lim.Release() | |
- s, err := newSearcher(dbpath, name, repo, refs, lim) | |
+ s, err := newSearcher(dbpath, name, repo, branch, refs, lim) | |
if err != nil { | |
resultCh <- searcherResult{ | |
- name: name, | |
- err: err, | |
+ name: name + "@" + branch, | |
+ err: err, | |
} | |
return | |
} | |
resultCh <- searcherResult{ | |
- name: name, | |
+ name: name + "@" + branch, | |
searcher: s, | |
} | |
} | |
diff --git a/ui/assets/js/common.js b/ui/assets/js/common.js | |
index 683ff7b..7a6e2f1 100644 | |
--- a/ui/assets/js/common.js | |
+++ b/ui/assets/js/common.js | |
@@ -5,7 +5,7 @@ export function ExpandVars(template, values) { | |
return template; | |
}; | |
-export function UrlToRepo(repo, path, line, rev) { | |
+export function UrlToRepo(repo, branch, path, line, rev) { | |
var url = repo.url.replace(/\.git$/, ''), | |
pattern = repo['url-pattern'], | |
filename = path.substring(path.lastIndexOf('/') + 1), | |
@@ -34,6 +34,7 @@ export function UrlToRepo(repo, path, line, rev) { | |
// I'm sure there is a nicer React/jsx way to do this: | |
return ExpandVars(pattern['base-url'], { | |
url : url, | |
+ branch: branch, | |
path: path, | |
rev: rev, | |
anchor: anchor | |
diff --git a/ui/assets/js/hound.js b/ui/assets/js/hound.js | |
index a953aa2..57aa24a 100644 | |
--- a/ui/assets/js/hound.js | |
+++ b/ui/assets/js/hound.js | |
@@ -70,7 +70,8 @@ var ParamsFromQueryString = function(qs, params) { | |
var ParamsFromUrl = function(params) { | |
params = params || { | |
q: '', | |
- i: 'nope', | |
+ i: 'fosho', | |
+ allb: 'nope', | |
files: '', | |
repos: '*' | |
}; | |
@@ -165,6 +166,26 @@ var Model = { | |
if (params.repos === '') { | |
params.repos = '*'; | |
} | |
+ else { | |
+ // Replace repo names with repo@branch strings | |
+ let allRepos = params.repos.split(','); | |
+ let reposWithBranches = []; | |
+ for (let i = 0; i < allRepos.length; ++i) { | |
+ let repo = allRepos[i]; | |
+ let allBranches = this.repos[repo]["branches"]; | |
+ if (params.allb) | |
+ allBranches.forEach ( function (b) { | |
+ reposWithBranches.push(repo + "@" + b); | |
+ }) | |
+ else if (allBranches.includes("develop")) | |
+ reposWithBranches.push(repo + "@develop"); | |
+ } | |
+ params.repos = reposWithBranches.join(","); | |
+ } | |
+ | |
+ if(params.files == '') { | |
+ params.files = "(\.bat|\.conf|\.css|\.csv|\.cql|\.erb|\.go|\.groovy|\.haml|\.hogan|\.hs|\.htm|\.html|\.j2|\.java|\.js|\.json|\.jsp|\.less|\.md|\.mustache|\.pas|\.php|\.properties|\.py|\.rb|\.scala|\.scss|\.sh|\.sql|\.txt|\.xml|\.yaml|\.yml|Dockerfile)"; | |
+ } | |
_this.params = params; | |
@@ -172,7 +193,7 @@ var Model = { | |
// sending it to the server and having the server do work | |
// to produce an error, we simply return empty results | |
// immediately in the client. | |
- if (params.q == '') { | |
+ if (params.q == '' || params.repos == '') { | |
_this.results = []; | |
_this.resultsByRepo = {}; | |
_this.didSearch.raise(_this, _this.Results); | |
@@ -189,7 +210,6 @@ var Model = { | |
_this.didError.raise(_this, data.Error); | |
return; | |
} | |
- | |
var matches = data.Results, | |
stats = data.Stats, | |
results = []; | |
@@ -223,7 +243,6 @@ var Model = { | |
Total: Date.now() - startedAt, | |
Files: stats.FilesOpened | |
}; | |
- | |
_this.didSearch.raise(_this, _this.results, _this.stats); | |
}, | |
error: function(xhr, status, err) { | |
@@ -269,6 +288,7 @@ var Model = { | |
}, | |
NameForRepo: function(repo) { | |
+ repo = repo.split("@")[0]; | |
var info = this.repos[repo]; | |
if (!info) { | |
return repo; | |
@@ -290,8 +310,11 @@ var Model = { | |
return url.substring(bx + 1, ax) + ' / ' + name; | |
}, | |
- UrlToRepo: function(repo, path, line, rev) { | |
- return UrlToRepo(this.repos[repo], path, line, rev); | |
+ UrlToRepo: function(repo_branch, path, line, rev) { | |
+ let repo_n_branch = repo_branch.split("@"); | |
+ let repo = repo_n_branch[0]; | |
+ let branch = repo_n_branch[1]; | |
+ return UrlToRepo(this.repos[repo], branch, path, line, rev); | |
} | |
}; | |
@@ -389,16 +412,19 @@ var SearchBar = React.createClass({ | |
q : this.refs.q.getDOMNode().value.trim(), | |
files : this.refs.files.getDOMNode().value.trim(), | |
repos : repos.join(','), | |
- i: this.refs.icase.getDOMNode().checked ? 'fosho' : 'nope' | |
+ i: this.refs.icase.getDOMNode().checked ? 'fosho' : 'nope', | |
+ allb: this.refs.allb.getDOMNode().checked ? 'fosho' : 'nope' | |
}; | |
}, | |
setParams: function(params) { | |
var q = this.refs.q.getDOMNode(), | |
i = this.refs.icase.getDOMNode(), | |
+ allb = this.refs.allb.getDOMNode(), | |
files = this.refs.files.getDOMNode(); | |
q.value = params.q; | |
i.checked = ParamValueToBool(params.i); | |
+ allb.checked = ParamValueToBool(params.allb); | |
files.value = params.files; | |
}, | |
hasAdvancedValues: function() { | |
@@ -501,6 +527,12 @@ var SearchBar = React.createClass({ | |
<input id="ignore-case" type="checkbox" ref="icase" /> | |
</div> | |
</div> | |
+ <div className="field"> | |
+ <label htmlFor="all-branches">All branches</label> | |
+ <div className="field-input"> | |
+ <input id="all-branches" type="checkbox" ref="allb" /> | |
+ </div> | |
+ </div> | |
<div className="field"> | |
<label className="multiselect_label" htmlFor="repos">Select Repo</label> | |
<div className="field-input"> | |
@@ -758,6 +790,7 @@ var App = React.createClass({ | |
this.setState({ | |
q: params.q, | |
i: params.i, | |
+ allb: params.allb, | |
files: params.files, | |
repos: repos | |
}); | |
@@ -812,6 +845,7 @@ var App = React.createClass({ | |
var path = location.pathname + | |
'?q=' + encodeURIComponent(params.q) + | |
'&i=' + encodeURIComponent(params.i) + | |
+ '&allb=' + encodeURIComponent(params.allb) + | |
'&files=' + encodeURIComponent(params.files) + | |
'&repos=' + params.repos; | |
history.pushState({path:path}, '', path); | |
@@ -822,6 +856,7 @@ var App = React.createClass({ | |
<SearchBar ref="searchBar" | |
q={this.state.q} | |
i={this.state.i} | |
+ allb={this.state.allb} | |
files={this.state.files} | |
repos={this.state.repos} | |
onSearchRequested={this.onSearchRequested} /> | |
diff --git a/vcs/bzr.go b/vcs/bzr.go | |
index f8acd8d..c12fc9e 100644 | |
--- a/vcs/bzr.go | |
+++ b/vcs/bzr.go | |
@@ -43,7 +43,7 @@ func (g *BzrDriver) HeadRev(dir string) (string, error) { | |
return strings.TrimSpace(buf.String()), cmd.Wait() | |
} | |
-func (g *BzrDriver) Pull(dir string) (string, error) { | |
+func (g *BzrDriver) Pull(dir, branch string) (string, error) { | |
cmd := exec.Command("bzr", "pull") | |
cmd.Dir = dir | |
out, err := cmd.CombinedOutput() | |
@@ -55,7 +55,7 @@ func (g *BzrDriver) Pull(dir string) (string, error) { | |
return g.HeadRev(dir) | |
} | |
-func (g *BzrDriver) Clone(dir, url string) (string, error) { | |
+func (g *BzrDriver) Clone(dir, url, branch string) (string, error) { | |
par, rep := filepath.Split(dir) | |
cmd := exec.Command( | |
"bzr", | |
diff --git a/vcs/git.go b/vcs/git.go | |
index f8c6682..c05e7e7 100644 | |
--- a/vcs/git.go | |
+++ b/vcs/git.go | |
@@ -61,7 +61,26 @@ func run(desc, dir, cmd string, args ...string) error { | |
return nil | |
} | |
-func (g *GitDriver) Pull(dir string) (string, error) { | |
+func (g *GitDriver) Checkout(dir, branch string) error { | |
+ cmd := exec.Command("git", "checkout", branch) | |
+ cmd.Dir = dir | |
+ out, err := cmd.CombinedOutput() | |
+ if err != nil { | |
+ log.Printf("Failed to git checkout %s, see output below\n%sContinuing...", dir, out) | |
+ return err | |
+ } | |
+ return nil | |
+} | |
+ | |
+func (g *GitDriver) Pull(dir, branch string) (string, error) { | |
+ if branch == "" { | |
+ branch = defaultRef | |
+ } | |
+ | |
+ if err := g.Checkout(dir, branch); err != nil { | |
+ return "", err | |
+ } | |
+ | |
if err := run("git fetch", dir, | |
"git", | |
"fetch", | |
@@ -69,7 +88,7 @@ func (g *GitDriver) Pull(dir string) (string, error) { | |
"--no-tags", | |
"--depth", "1", | |
"origin", | |
- fmt.Sprintf("+%s:remotes/origin/%s", defaultRef, defaultRef)); err != nil { | |
+ fmt.Sprintf("+%s:remotes/origin/%s", branch, branch)); err != nil { | |
return "", err | |
} | |
@@ -77,18 +96,24 @@ func (g *GitDriver) Pull(dir string) (string, error) { | |
"git", | |
"reset", | |
"--hard", | |
- fmt.Sprintf("origin/%s", defaultRef)); err != nil { | |
+ fmt.Sprintf("origin/%s", branch)); err != nil { | |
return "", err | |
} | |
return g.HeadRev(dir) | |
} | |
-func (g *GitDriver) Clone(dir, url string) (string, error) { | |
+func (g *GitDriver) Clone(dir, url, branch string) (string, error) { | |
+ if branch == "" { | |
+ branch = defaultRef | |
+ } | |
+ | |
par, rep := filepath.Split(dir) | |
cmd := exec.Command( | |
"git", | |
"clone", | |
+ "-b", | |
+ branch, | |
"--depth", "1", | |
url, | |
rep) | |
diff --git a/vcs/hg.go b/vcs/hg.go | |
index 8e13bb1..6f01ec8 100644 | |
--- a/vcs/hg.go | |
+++ b/vcs/hg.go | |
@@ -47,7 +47,7 @@ func (g *MercurialDriver) HeadRev(dir string) (string, error) { | |
return strings.TrimSpace(buf.String()), cmd.Wait() | |
} | |
-func (g *MercurialDriver) Pull(dir string) (string, error) { | |
+func (g *MercurialDriver) Pull(dir, branch string) (string, error) { | |
cmd := exec.Command("hg", "pull", "-u") | |
cmd.Dir = dir | |
err := cmd.Run() | |
@@ -58,7 +58,7 @@ func (g *MercurialDriver) Pull(dir string) (string, error) { | |
return g.HeadRev(dir) | |
} | |
-func (g *MercurialDriver) Clone(dir, url string) (string, error) { | |
+func (g *MercurialDriver) Clone(dir, url, branch string) (string, error) { | |
par, rep := filepath.Split(dir) | |
cmd := exec.Command( | |
"hg", | |
diff --git a/vcs/svn.go b/vcs/svn.go | |
index 087aa0f..b8a335f 100644 | |
--- a/vcs/svn.go | |
+++ b/vcs/svn.go | |
@@ -54,7 +54,7 @@ func (g *SVNDriver) HeadRev(dir string) (string, error) { | |
return strings.TrimSpace(buf.String()), cmd.Wait() | |
} | |
-func (g *SVNDriver) Pull(dir string) (string, error) { | |
+func (g *SVNDriver) Pull(dir, branch string) (string, error) { | |
cmd := exec.Command( | |
"svn", | |
"update", | |
@@ -73,7 +73,7 @@ func (g *SVNDriver) Pull(dir string) (string, error) { | |
return g.HeadRev(dir) | |
} | |
-func (g *SVNDriver) Clone(dir, url string) (string, error) { | |
+func (g *SVNDriver) Clone(dir, url, branch string) (string, error) { | |
par, rep := filepath.Split(dir) | |
cmd := exec.Command( | |
"svn", | |
diff --git a/vcs/vcs.go b/vcs/vcs.go | |
index 6f2588d..fc2dd6f 100644 | |
--- a/vcs/vcs.go | |
+++ b/vcs/vcs.go | |
@@ -16,10 +16,10 @@ var drivers = make(map[string]func(c []byte) (Driver, error)) | |
type Driver interface { | |
// Clone a new working directory. | |
- Clone(dir, url string) (string, error) | |
+ Clone(dir, url, branch string) (string, error) | |
// Pull new changes from the server and update the working directory. | |
- Pull(dir string) (string, error) | |
+ Pull(dir, branch string) (string, error) | |
// Return the revision at the head of the vcs directory. | |
HeadRev(dir string) (string, error) | |
@@ -69,9 +69,9 @@ func exists(path string) bool { | |
// A utility method that carries out the common operation of cloning | |
// if the working directory is absent and pulling otherwise. | |
-func (w *WorkDir) PullOrClone(dir, url string) (string, error) { | |
+func (w *WorkDir) PullOrClone(dir, url, branch string) (string, error) { | |
if exists(dir) { | |
- return w.Pull(dir) | |
+ return w.Pull(dir, branch) | |
} | |
- return w.Clone(dir, url) | |
+ return w.Clone(dir, url, branch) | |
} | |
-- | |
2.17.1 | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Save the file and use
git am < 0001-Add-the-ability-to-specify-multiple-branches-and-sea.patch
to apply it. It has been tested on 3rd April 2019 by applying it on top of the latest commit on master branch (commit: hound-search/hound@74ec744). I don't have any prior experience with golang and just tried to guess and google to make these changes work. Feel free to standardize the code and send an MR to https://github.com/etsy/hound if you wish.