├── .babelrc ├── .github ├── FUNDING.yml └── workflows │ ├── main.yml │ └── release.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .travis.yml ├── Dockerfile ├── DockerfilePR ├── LICENSE ├── Makefile ├── README.md ├── ansi ├── ansi.go ├── ansi_test.go ├── tty.go ├── tty_darwin.go ├── tty_linux.go └── tty_windows.go ├── api └── api.go ├── cd ├── .gitignore ├── README.md ├── deploy-uwsgi.py └── deploy.example.sh ├── client ├── ack.go ├── client.go ├── coalesce.go ├── coalesce_test.go └── grep.go ├── cmds ├── hound │ └── main.go └── houndd │ └── main.go ├── codesearch ├── AUTHORS ├── CONTRIBUTORS ├── LICENSE ├── README ├── index │ ├── merge.go │ ├── merge_test.go │ ├── mmap_bsd.go │ ├── mmap_linux.go │ ├── mmap_windows.go │ ├── read.go │ ├── read_test.go │ ├── regexp.go │ ├── regexp_test.go │ ├── write.go │ └── write_test.go ├── lib │ ├── README.template │ ├── buildall │ ├── setup │ ├── uploadall │ └── version ├── regexp │ ├── copy.go │ ├── match.go │ ├── regexp.go │ ├── regexp_test.go │ └── utf.go └── sparse │ └── set.go ├── concourse.md ├── concourse.yml ├── config-example.json ├── config ├── config.go └── config_test.go ├── default-config.json ├── index ├── grep.go ├── grep_test.go ├── index.go └── index_test.go ├── jest.config.js ├── misc └── hound.service ├── package-lock.json ├── package.json ├── screen_capture.gif ├── searcher └── searcher.go ├── ui ├── assets │ ├── css │ │ ├── hound.css │ │ └── octicons │ │ │ ├── LICENSE.txt │ │ │ ├── README.md │ │ │ ├── octicons-local.ttf │ │ │ ├── octicons.css │ │ │ ├── octicons.eot │ │ │ ├── octicons.less │ │ │ ├── octicons.svg │ │ │ ├── octicons.ttf │ │ │ ├── octicons.woff │ │ │ └── sprockets-octicons.scss │ ├── excluded_files.tpl.html │ ├── favicon.ico │ ├── images │ │ └── busy.gif │ ├── index.tpl.html │ ├── js │ │ ├── common.test.js │ │ ├── components │ │ │ ├── ExcludedFiles │ │ │ │ ├── ExcludedRow.jsx │ │ │ │ ├── ExcludedTable.jsx │ │ │ │ ├── FilterableExcludedFiles.jsx │ │ │ │ ├── RepoButton.jsx │ │ │ │ └── RepoList.jsx │ │ │ └── HoundApp │ │ │ │ ├── App.jsx │ │ │ │ ├── File.jsx │ │ │ │ ├── FilesView.jsx │ │ │ │ ├── Line.jsx │ │ │ │ ├── Match.jsx │ │ │ │ ├── Repo.jsx │ │ │ │ ├── ResultView.jsx │ │ │ │ ├── SearchBar.jsx │ │ │ │ └── SelectionTooltip.jsx │ │ ├── excluded_files.jsx │ │ ├── helpers │ │ │ ├── Model.js │ │ │ ├── PatternLinks.js │ │ │ ├── SelectionManager.js │ │ │ ├── Signal.js │ │ │ └── common.js │ │ ├── hound.jsx │ │ └── utils │ │ │ └── index.js │ └── open_search.tpl.xml ├── bindata.go ├── content.go └── ui.go ├── vcs ├── bzr.go ├── git.go ├── git_test.go ├── hg.go ├── svn.go ├── svn_test.go ├── vcs.go └── vcs_test.go ├── web └── web.go └── webpack.config.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["@babel/preset-env", "@babel/preset-react"] 3 | } -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | patreon: itpp 2 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Rebuilding Docker Images 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - "**" 9 | - "!ui/bindata.go" 10 | 11 | jobs: 12 | build: 13 | 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Setup Node.js 17 | uses: actions/setup-node@v1 18 | - name: Setup Go for use with actions 19 | uses: 
actions/setup-go@v2 20 | with: 21 | go-version: '1.13.8' 22 | - uses: actions/checkout@v1 23 | - name: make 24 | run: | 25 | mkdir -p $(go env GOPATH)/src/github.com/itpp-labs/ 26 | ln -s $(pwd) $(go env GOPATH)/src/github.com/itpp-labs/hound 27 | make 28 | - name: Build and publish DEV Docker Image 29 | uses: elgohr/Publish-Docker-Github-Action@master 30 | env: 31 | DEV: yes 32 | with: 33 | name: itpp-labs/hound/dev 34 | registry: docker.pkg.github.com 35 | username: ${{ github.actor }} 36 | password: ${{ secrets.GITHUB_TOKEN }} 37 | buildargs: DEV 38 | - name: Build and publish Production Docker Image 39 | uses: elgohr/Publish-Docker-Github-Action@master 40 | with: 41 | name: itpp-labs/hound/production 42 | registry: docker.pkg.github.com 43 | username: ${{ github.actor }} 44 | password: ${{ secrets.GITHUB_TOKEN }} 45 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | release: 4 | types: [prereleased, edited] 5 | 6 | jobs: 7 | deploy: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Check that it's prerelease 11 | if: github.event.release.prerelease == false 12 | run: | 13 | exit 1 14 | - name: Start deployment 15 | uses: bobheadxi/deployments@v0.2.0 16 | id: deployment 17 | with: 18 | step: start 19 | token: ${{ secrets.GITHUB_TOKEN }} 20 | env: release 21 | 22 | - name: Deploy 23 | run: | 24 | OUTPUT=$(curl -is ${{ secrets.DEPLOYMENT_WEBHOOK }}) 25 | echo "$OUTPUT" 26 | STATUSCODE=$(echo $OUTPUT| grep HTTP/1.1 | awk {'print $2'}) 27 | test $STATUSCODE -ne 200 && exit $STATUSCODE || exit 0 28 | - name: Deployment status 29 | uses: bobheadxi/deployments@v0.2.0 30 | if: always() 31 | with: 32 | step: finish 33 | token: ${{ secrets.GITHUB_TOKEN }} 34 | status: ${{ job.status }} 35 | deployment_id: ${{ steps.deployment.outputs.deployment_id }} 36 | env_url: https://odoo-source.com/ 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.vagrant 2 | /.build 3 | /node_modules 4 | .DS_Store 5 | *.exe 6 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | ui/assets/js/JSXTransformer-0.12.2.js 2 | ui/assets/js/jquery-2.1.3.min.js 3 | ui/assets/js/react-0.12.2.min.js 4 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "tabWidth": 4 3 | } 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: go 2 | 3 | go: 4 | - "1.9" 5 | - "1.11" 6 | - "1.12" 7 | - "1.13" 8 | - tip 9 | install: go get ./... 10 | script: go test ./... 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.11 2 | 3 | ARG DEV=no 4 | 5 | ENV GOPATH /go 6 | 7 | RUN apk update \ 8 | && apk add go git subversion libc-dev mercurial bzr openssh 9 | 10 | COPY . 
/go/src/github.com/itpp-labs/hound 11 | 12 | COPY default-config.json /data/config.json 13 | 14 | RUN go install github.com/itpp-labs/hound/cmds/houndd 15 | 16 | RUN [ "$DEV" = "yes" ] \ 17 | && apk add npm make rsync || true 18 | 19 | RUN [ "$DEV" = "no" ] \ 20 | && apk del go \ 21 | && rm -f /var/cache/apk/* \ 22 | && rm -rf /go/src /go/pkg || true 23 | 24 | VOLUME ["/data"] 25 | 26 | EXPOSE 6080 9000 27 | 28 | ENTRYPOINT ["/go/bin/houndd", "-conf", "/data/config.json"] 29 | -------------------------------------------------------------------------------- /DockerfilePR: -------------------------------------------------------------------------------- 1 | # it's used in concourse.yml 2 | FROM docker.pkg.github.com/itpp-labs/hound/dev 3 | 4 | COPY . /go/src/github.com/itpp-labs/hound 5 | 6 | RUN make -C /go/src/github.com/itpp-labs/hound 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014, Etsy, Inc. 2 | Copyright (c) 2019, IT-Projects LLC 3 | Copyright (c) 2020, IT Projects Labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | CMDS := $(GOPATH)/bin/houndd $(GOPATH)/bin/hound 2 | 3 | SRCS := $(shell find . -type f -name '*.go') 4 | 5 | WEBPACK_ARGS := -p 6 | ifdef DEBUG 7 | WEBPACK_ARGS := -d 8 | endif 9 | 10 | ALL: $(CMDS) 11 | 12 | ui: ui/bindata.go 13 | 14 | node_modules: 15 | npm install 16 | 17 | $(GOPATH)/bin/houndd: ui/bindata.go $(SRCS) 18 | go install github.com/itpp-labs/hound/cmds/houndd 19 | 20 | $(GOPATH)/bin/hound: ui/bindata.go $(SRCS) 21 | go install github.com/itpp-labs/hound/cmds/hound 22 | 23 | .build/bin/go-bindata: 24 | GOPATH=`pwd`/.build go get github.com/go-bindata/go-bindata/... 25 | 26 | ui/bindata.go: .build/bin/go-bindata node_modules $(wildcard ui/assets/**/*) 27 | rsync -r ui/assets/* .build/ui 28 | npx webpack $(WEBPACK_ARGS) 29 | $< -o $@ -pkg ui -prefix .build/ui -nomemcopy .build/ui/... 30 | 31 | dev: ALL 32 | npm install 33 | 34 | test: 35 | go test github.com/itpp-labs/hound/... 
36 | npm test 37 | 38 | clean: 39 | rm -rf .build node_modules 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Hound 2 | 3 | [![Build Status](https://travis-ci.org/itpp-labs/hound.svg?branch=master)](https://travis-ci.org/itpp-labs/hound) 4 | 5 | Hound is an extremely fast source code search engine. The core is based on this article (and code) from Russ Cox: 6 | [Regular Expression Matching with a Trigram Index](http://swtch.com/~rsc/regexp/regexp4.html). Hound itself is a static 7 | [React](http://facebook.github.io/react/) frontend that talks to a [Go](http://golang.org/) backend. The backend keeps an up-to-date index for each repository and answers searches through a minimal API. Here it is in action: 8 | 9 | ![Hound Screen Capture](screen_capture.gif) 10 | 11 | Live demo: https://search.odooism.com/ 12 | 13 | ## Quick Start Guide 14 | 15 | ### Using Go Tools 16 | 17 | 1. Use the Go tools to install Hound. The binaries `houndd` (server) and `hound` (cli) will be installed in your $GOPATH. 18 | 19 | ``` 20 | go get github.com/itpp-labs/hound/cmds/... 21 | ``` 22 | 23 | 2. Create a [config.json](config-example.json) in a directory with your list of repositories. 24 | 25 | 3. Run the Hound server with `houndd` and you should see output similar to: 26 | ``` 27 | 2015/03/13 09:07:42 Searcher started for statsd 28 | 2015/03/13 09:07:42 Searcher started for Hound 29 | 2015/03/13 09:07:42 All indexes built! 30 | 2015/03/13 09:07:42 running server at http://localhost:6080 31 | ``` 32 | 33 | ### Using Docker (1.4+) 34 | 35 | 1. Create a [config.json](config-example.json) in a directory with your list of repositories. 36 | 37 | 2. Run 38 | 39 | docker run -d -p 6080:6080 --name hound -v $(pwd):/data ghcr.io/itpp-labs/hound/production 40 | 41 | 42 | You should be able to navigate to [http://localhost:6080/](http://localhost:6080/) as usual. 43 | 44 | 45 | ## Running in Production 46 | 47 | There are no special flags to run Hound in production. You can use the `--addr=:6880` flag to control the port to which the server binds. Currently, Hound does not support TLS as most users simply run Hound behind either Apache or nginx. Adding TLS support is pretty straight forward though if anyone wants to add it. 48 | 49 | ## Why Another Code Search Tool? 50 | 51 | We've used many similar tools in the past, and most of them are either too slow, too hard to configure, or require too much software to be installed. 52 | Which brings us to... 53 | 54 | ## Requirements 55 | * Go 1.9+ 56 | 57 | Yup, that's it. You can proxy requests to the Go service through Apache/nginx/etc., but that's not required. 58 | 59 | 60 | ## Support 61 | 62 | Currently Hound is only tested on MacOS and CentOS, but it should work on any *nix system. Hound on Windows is not supported but we've heard it compiles and runs just fine. 63 | 64 | Hound supports the following version control systems: 65 | 66 | * Git - This is the default 67 | * Mercurial - use `"vcs" : "hg"` in the config 68 | * SVN - use `"vcs" : "svn"` in the config 69 | * Bazaar - use `"vcs" : "bzr"` in the config 70 | 71 | See [config-example.json](config-example.json) for examples of how to use each VCS. 72 | 73 | ## Private Repositories 74 | 75 | There are a couple of ways to get Hound to index private repositories: 76 | 77 | * Use the `file://` protocol. This allows you to index a local clone of a repository. 
The downside here is that the polling to keep the repo up to date will 78 | not work. (This also doesn't work on local folders that are not of a supported repository type.) 79 | * Use SSH style URLs in the config: `"url" : "git@github.com:foo/bar.git"`. As long as you have your 80 | [SSH keys](https://help.github.com/articles/generating-ssh-keys/) set up on the box where Hound is running this will work. 81 | 82 | ## Keeping Repos Updated 83 | 84 | By default Hound polls the URL in the config for updates every 30 seconds. You can override this value by setting the `ms-between-poll` key on a per repo basis in the config. If you are indexing a large number of repositories, you may also be interested in tweaking the `max-concurrent-indexers` property. You can see how these work in the [example config](config-example.json). 85 | 86 | ## Search optimization 87 | 88 | If you have large num of repositories you may be interested in tweaking following configs: 89 | 90 | * `max-concurrent-searchers` -- default is 1000 91 | * `max-repos-in-first-result` -- instructs hound to don't show results from more repos than this number 92 | * `max-repos-in-next-result` -- max num repos to show when users clicks "Load from other repos" 93 | 94 | ## Editor Integration 95 | 96 | Currently the following editors have plugins that support Hound: 97 | 98 | * [Sublime Text](https://github.com/bgreenlee/SublimeHound) 99 | * [Vim](https://github.com/urthbound/hound.vim) 100 | * [Emacs](https://github.com/ryoung786/hound.el) 101 | * [Visual Studio Code](https://github.com/sjzext/vscode-hound) 102 | 103 | ## Hacking on Hound 104 | 105 | ### Editing & Building 106 | 107 | #### Requirements: 108 | * make 109 | * Node.js ([Installation Instructions](https://github.com/joyent/node/wiki/Installing-Node.js-via-package-manager)) 110 | 111 | Hound includes a `Makefile` to aid in building locally, but it depends on the source being added to a proper Go workspace so that 112 | Go tools work accordingly. See [Setting GOPATH](https://github.com/golang/go/wiki/SettingGOPATH) for further details about setting 113 | up your Go workspace. With a `GOPATH` set, the following commands will build hound locally. 114 | 115 | ``` 116 | git clone https://github.com/itpp-labs/hound.git ${GOPATH}/src/github.com/itpp-labs/hound 117 | cd ${GOPATH}/src/github.com/itpp-labs/hound 118 | make 119 | ``` 120 | 121 | If this is your only Go project, you can set your GOPATH just for Hound: 122 | ``` 123 | cd 124 | mkdir go 125 | cd go 126 | git clone https://github.com/itpp-labs/hound.git src/github.com/itpp-labs/hound 127 | GOPATH=$(pwd) make -C src/github.com/itpp-labs/hound 128 | ``` 129 | 130 | ### Testing 131 | 132 | There are an increasing number of tests in each of the packages in Hound. Please make sure these pass before uploading your Pull Request. You can run the tests with the following command. 133 | To run the entire test suite, use: 134 | 135 | ``` 136 | make test 137 | ``` 138 | 139 | If you want to just run the JavaScript test suite, use: 140 | ``` 141 | npm test 142 | ``` 143 | 144 | Any Go files that end in `_test.go` are assumed to be test files. Similarly, any JavaScript files that ends in `.test.js` are automatically run by Jest, our test runner. Tests should live next to the files that they cover. [Check out Jest's docs](https://jestjs.io/docs/en/getting-started) for more details on writing Jest tests, and [check out Go's testing docs](https://golang.org/pkg/testing/) for more details on testing Go code. 
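For illustration, here is what a new Go test might look like. This sketch is not part of the repository and the package, file, and function names are invented, but any `TestXxx` function in a file ending in `_test.go`, placed next to the code it covers, is run by `make test`:

```
package vcs

import (
	"strings"
	"testing"
)

// A hypothetical example test (it would live in something like vcs/example_test.go).
func TestTrimGitSuffix(t *testing.T) {
	if got := strings.TrimSuffix("hound.git", ".git"); got != "hound" {
		t.Errorf("expected %q, got %q", "hound", got)
	}
}
```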
145 | 146 | ### Working on the web UI 147 | 148 | Hound includes a web UI that is composed of several files (html, css, javascript, etc.). To make sure hound works seamlessly with the standard Go tools, these resources are all bundled inside of the `houndd` binary. Note that changes to the UI will result in local changes to the `ui/bindata.go` file. You must include these changes in your Pull Request. 149 | 150 | To bundle UI changes in `ui/bindata.go` use: 151 | 152 | ``` 153 | make ui 154 | ``` 155 | 156 | To make development easier, there is a flag that will read the files from the file system (allowing the much-loved edit/refresh cycle). 157 | 158 | First you should ensure you have all the dependencies installed that you need by running: 159 | 160 | ``` 161 | make dev 162 | ``` 163 | 164 | Then run the hound server with the --dev option: 165 | 166 | ``` 167 | bin/houndd --dev 168 | ``` 169 | 170 | Note: to make it work, port `9000` should be free. 171 | 172 | ## Credits 173 | 174 | Created at [Etsy](https://www.etsy.com) by: 175 | 176 | * [Kelly Norton](https://github.com/kellegous) 177 | * [Jonathan Klein](https://github.com/jklein) 178 | 179 | Maintained by [IT Projects Labs](https://itpp.dev/). 180 | -------------------------------------------------------------------------------- /ansi/ansi.go: -------------------------------------------------------------------------------- 1 | package ansi 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | ) 7 | 8 | var ( 9 | start = "\033[" 10 | reset = "\033[0m" 11 | bold = "1;" 12 | blink = "5;" 13 | underline = "4;" 14 | inverse = "7;" 15 | ) 16 | 17 | type Style byte 18 | 19 | const ( 20 | Normal Style = 0x00 21 | Bold Style = 0x01 22 | Blink Style = 0x02 23 | Underline Style = 0x04 24 | Invert Style = 0x08 25 | Intense Style = 0x10 26 | ) 27 | 28 | type Color int 29 | 30 | const ( 31 | Black Color = iota 32 | Red 33 | Green 34 | Yellow 35 | Blue 36 | Magenta 37 | Cyan 38 | White 39 | Colorless 40 | ) 41 | 42 | const ( 43 | normalFg = 30 44 | intenseFg = 90 45 | normalBg = 40 46 | intenseBg = 100 47 | ) 48 | 49 | type Colorer struct { 50 | enabled bool 51 | } 52 | 53 | func NewFor(f *os.File) *Colorer { 54 | return &Colorer{isTTY(f.Fd())} 55 | } 56 | 57 | func (c *Colorer) Fg(s string, color Color, style Style) string { 58 | return c.FgBg(s, color, style, Colorless, Normal) 59 | } 60 | 61 | func (c *Colorer) FgBg(s string, fgColor Color, fgStyle Style, bgColor Color, bgStyle Style) string { 62 | if !c.enabled { 63 | return s 64 | } 65 | 66 | buf := make([]byte, 0, 24) 67 | buf = append(buf, start...) 68 | 69 | if fgStyle&Bold != 0 { 70 | buf = append(buf, bold...) 71 | } 72 | 73 | if fgStyle&Blink != 0 { 74 | buf = append(buf, blink...) 75 | } 76 | 77 | if fgStyle&Underline != 0 { 78 | buf = append(buf, underline...) 79 | } 80 | 81 | if fgStyle&Invert != 0 { 82 | buf = append(buf, inverse...) 83 | } 84 | 85 | var fgBase int 86 | if fgStyle&Intense == 0 { 87 | fgBase = normalFg 88 | } else { 89 | fgBase = intenseFg 90 | } 91 | buf = append(buf, fmt.Sprintf("%d;", fgBase+int(fgColor))...) 92 | 93 | if bgColor != Colorless { 94 | var bgBase int 95 | if bgStyle&Intense == 0 { 96 | bgBase = normalBg 97 | } else { 98 | bgBase = intenseBg 99 | } 100 | buf = append(buf, fmt.Sprintf("%d;", bgBase+int(bgColor))...) 101 | } 102 | 103 | buf = append(buf[:len(buf)-1], "m"...) 
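// The append above swapped the trailing ';' for the 'm' that terminates the ANSI SGR escape sequence.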
104 | return string(buf) + s + reset 105 | } 106 | -------------------------------------------------------------------------------- /ansi/ansi_test.go: -------------------------------------------------------------------------------- 1 | package ansi 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "strings" 7 | "testing" 8 | ) 9 | 10 | var ( 11 | printTests = false 12 | ) 13 | 14 | func makeReal(s string) string { 15 | return strings.Replace(s, "~", "\x1b", -1) 16 | } 17 | 18 | func makeFake(s string) string { 19 | return strings.Replace(s, "\x1b", "~", -1) 20 | } 21 | 22 | func assertEqual(t *testing.T, got string, exp string) { 23 | if printTests { 24 | fmt.Println(got) 25 | } 26 | 27 | exp = strings.Replace(exp, "~", "\x1b", -1) 28 | if got != exp { 29 | t.Errorf("mismatch: %s & %s", makeFake(got), makeFake(exp)) 30 | } 31 | } 32 | 33 | func TestEnabled(t *testing.T) { 34 | a := Colorer{true} 35 | assertEqual(t, 36 | a.FgBg("x", Black, Normal, Colorless, Normal), 37 | "~[30mx~[0m") 38 | assertEqual(t, 39 | a.FgBg("x", Red, Normal, Colorless, Normal), 40 | "~[31mx~[0m") 41 | assertEqual(t, 42 | a.FgBg("x", Red, Intense, Colorless, Normal), 43 | "~[91mx~[0m") 44 | assertEqual(t, 45 | a.FgBg("x", Green, Bold|Blink|Underline|Invert, Colorless, Normal), 46 | "~[1;5;4;7;32mx~[0m") 47 | assertEqual(t, 48 | a.FgBg("x", Green, Bold|Blink|Underline|Invert|Intense, Colorless, Normal), 49 | "~[1;5;4;7;92mx~[0m") 50 | 51 | assertEqual(t, 52 | a.FgBg("x", Green, Bold|Blink|Underline|Intense, Magenta, Normal), 53 | "~[1;5;4;92;45mx~[0m") 54 | assertEqual(t, 55 | a.FgBg("x", Yellow, Bold|Blink|Underline|Intense, Cyan, Intense), 56 | "~[1;5;4;93;106mx~[0m") 57 | } 58 | 59 | func TestDisabled(t *testing.T) { 60 | a := Colorer{false} 61 | assertEqual(t, 62 | a.FgBg("x", Black, Normal, Colorless, Normal), 63 | "x") 64 | assertEqual(t, 65 | a.FgBg("foo", Red, Normal, Colorless, Normal), 66 | "foo") 67 | assertEqual(t, 68 | a.FgBg("butter", Red, Intense, Colorless, Normal), 69 | "butter") 70 | assertEqual(t, 71 | a.FgBg("x", Green, Bold|Blink|Underline|Invert, Colorless, Normal), 72 | "x") 73 | assertEqual(t, 74 | a.FgBg("x", Green, Bold|Blink|Underline|Invert|Intense, Colorless, Normal), 75 | "x") 76 | 77 | assertEqual(t, 78 | a.FgBg("x", Green, Bold|Blink|Underline|Intense, Magenta, Normal), 79 | "x") 80 | assertEqual(t, 81 | a.FgBg("x", Yellow, Bold|Blink|Underline|Intense, Cyan, Intense), 82 | "x") 83 | } 84 | 85 | func TestIsTerminal(t *testing.T) { 86 | // just make sure we can call this thing. 87 | isTTY(os.Stdout.Fd()) 88 | } 89 | -------------------------------------------------------------------------------- /ansi/tty.go: -------------------------------------------------------------------------------- 1 | // +build !windows 2 | 3 | package ansi 4 | 5 | import ( 6 | "syscall" 7 | "unsafe" 8 | ) 9 | 10 | // Issue a ioctl syscall to try to read a termios for the descriptor. If 11 | // we are unable to read one, this is not a tty. 
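// The ioctlReadTermios request code is defined per platform in tty_darwin.go and tty_linux.go.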
12 | func isTTY(fd uintptr) bool { 13 | var termios syscall.Termios 14 | _, _, err := syscall.Syscall6( 15 | syscall.SYS_IOCTL, 16 | fd, 17 | ioctlReadTermios, 18 | uintptr(unsafe.Pointer(&termios)), 19 | 0, 20 | 0, 21 | 0) 22 | return err == 0 23 | } 24 | -------------------------------------------------------------------------------- /ansi/tty_darwin.go: -------------------------------------------------------------------------------- 1 | package ansi 2 | 3 | import "syscall" 4 | 5 | const ioctlReadTermios = syscall.TIOCGETA 6 | -------------------------------------------------------------------------------- /ansi/tty_linux.go: -------------------------------------------------------------------------------- 1 | package ansi 2 | 3 | const ioctlReadTermios = 0x5401 // syscall.TCGETS 4 | const ioctlWriteTermios = 0x5402 // syscall.TCSETS 5 | -------------------------------------------------------------------------------- /ansi/tty_windows.go: -------------------------------------------------------------------------------- 1 | package ansi 2 | 3 | import ( 4 | "syscall" 5 | "unsafe" 6 | ) 7 | 8 | var ( 9 | modkernel32 = syscall.MustLoadDLL("kernel32.dll") 10 | procGetConsoleMode = modkernel32.MustFindProc("GetConsoleMode") 11 | ) 12 | 13 | func isTTY(fd uintptr) bool { 14 | var mode uint32 15 | ret, _, err := procGetConsoleMode.Call(fd, uintptr(unsafe.Pointer(&mode))) 16 | return ret != 0 && err != nil 17 | } 18 | -------------------------------------------------------------------------------- /cd/.gitignore: -------------------------------------------------------------------------------- 1 | deploy.sh 2 | hound.logs 3 | -------------------------------------------------------------------------------- /cd/README.md: -------------------------------------------------------------------------------- 1 | # Continuous deployment 2 | 3 | CD consists of the following steps: 4 | 5 | * Some updates are pushed to master branch 6 | * Github Actions [rebuild images](../.github/workflows/main.yml) 7 | * New prerelease is created on github **manually** 8 | * Github Actions [sends webhooks](../.github/workflows/release.yml) to [WSGI Application](#wsgi-application) 9 | * WSGI App [fetches new docker image and recreate the docker container](deploy.example.sh) 10 | * Deployment is checked **manually** 11 | * Finally, release is published **manually** 12 | 13 | # WSGI App 14 | 15 | * Configure access to github registry with [token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line): 16 | 17 | ```sh 18 | docker login docker.pkg.github.com -u GITHUB_USERNAME -p GITHUB_TOKEN 19 | ``` 20 | 21 | * [install uwsgi](https://uwsgi-docs.readthedocs.io/en/latest/WSGIquickstart.html#installing-uwsgi-with-python-support). 22 | 23 | * Make `deploy.sh` file out of [deploy.example.sh](deploy.example.sh). 
24 | 25 | * Deploy WSGI App: 26 | 27 | ```sh 28 | uwsgi --http :9090 --wsgi-file deploy-uwsgi.py &> hound.logs & 29 | ``` 30 | 31 | * [Create secret](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) `DEPLOYMENT_WEBHOOK` with the url to your WSGI App 32 | -------------------------------------------------------------------------------- /cd/deploy-uwsgi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import subprocess 3 | 4 | def application(env, start_response): 5 | print("REQUEST: %s", env.get('REQUEST_URI')) 6 | res = subprocess.run(['bash', 'deploy.sh', env.get('REQUEST_URI')], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) 7 | 8 | if res.returncode == 0: 9 | code = '200 OK' 10 | else: 11 | code = '500 Error' 12 | start_response(code, [('Content-Type','text/plain')]) 13 | result = res.stdout 14 | print ("RESPONSE:\n%s" % result) 15 | return [result] 16 | -------------------------------------------------------------------------------- /cd/deploy.example.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # don't do anything on a wrong request 3 | test "$1" = "/your-secret" || exit 1 4 | 5 | DOCKER=docker.pkg.github.com/itpp-labs/hound/production 6 | NAME=hound 7 | DATA=$(pwd) 8 | docker pull $DOCKER 9 | docker stop $NAME 10 | docker rm $NAME 11 | docker run -d -p 6080:6080 --name $NAME -v $DATA:/data $DOCKER 12 | docker image prune -f 13 | -------------------------------------------------------------------------------- /client/ack.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "os" 7 | "regexp" 8 | 9 | "github.com/itpp-labs/hound/ansi" 10 | "github.com/itpp-labs/hound/config" 11 | ) 12 | 13 | type ackPresenter struct { 14 | f *os.File 15 | } 16 | 17 | func hiliteMatches(c *ansi.Colorer, p *regexp.Regexp, line string) string { 18 | // find the indexes for all matches 19 | idxs := p.FindAllStringIndex(line, -1) 20 | 21 | var buf bytes.Buffer 22 | beg := 0 23 | 24 | for _, idx := range idxs { 25 | // for each match add the contents before the match ... 
26 | buf.WriteString(line[beg:idx[0]]) 27 | // and the highlighted version of the match 28 | buf.WriteString(c.FgBg(line[idx[0]:idx[1]], 29 | ansi.Black, 30 | ansi.Bold, 31 | ansi.Yellow, 32 | ansi.Intense)) 33 | beg = idx[1] 34 | } 35 | 36 | buf.WriteString(line[beg:]) 37 | 38 | return buf.String() 39 | } 40 | 41 | func lineNumber(c *ansi.Colorer, buf *bytes.Buffer, n int, hasMatch bool) string { 42 | defer buf.Reset() 43 | 44 | s := fmt.Sprintf("%d", n) 45 | buf.WriteString(c.Fg(s, ansi.Yellow, ansi.Bold)) 46 | if hasMatch { 47 | buf.WriteByte(':') 48 | } else { 49 | buf.WriteByte('-') 50 | } 51 | for i := len(s); i < 6; i++ { 52 | buf.WriteByte(' ') 53 | } 54 | return buf.String() 55 | } 56 | 57 | func (p *ackPresenter) Present( 58 | re *regexp.Regexp, 59 | ctx int, 60 | repos map[string]*config.Repo, 61 | res *Response) error { 62 | 63 | c := ansi.NewFor(p.f) 64 | 65 | buf := bytes.NewBuffer(make([]byte, 0, 20)) 66 | 67 | for repo, resp := range res.Results { 68 | if _, err := fmt.Fprintf(p.f, "%s\n", 69 | c.Fg(repoNameFor(repos, repo), ansi.Red, ansi.Bold)); err != nil { 70 | return err 71 | } 72 | 73 | for _, file := range resp.Matches { 74 | if _, err := fmt.Fprintf(p.f, "%s\n", 75 | c.Fg(file.Filename, ansi.Green, ansi.Bold)); err != nil { 76 | return err 77 | } 78 | 79 | blocks := coalesceMatches(file.Matches) 80 | 81 | for _, block := range blocks { 82 | for i, n := 0, len(block.Lines); i < n; i++ { 83 | line := block.Lines[i] 84 | hasMatch := block.Matches[i] 85 | 86 | if hasMatch { 87 | line = hiliteMatches(c, re, line) 88 | } 89 | 90 | if _, err := fmt.Fprintf(p.f, "%s%s\n", 91 | lineNumber(c, buf, block.Start+i, hasMatch), 92 | line); err != nil { 93 | return err 94 | } 95 | } 96 | 97 | if ctx > 0 { 98 | if _, err := fmt.Fprintln(p.f, "--"); err != nil { 99 | return err 100 | } 101 | } 102 | } 103 | 104 | if _, err := fmt.Fprintln(p.f); err != nil { 105 | return err 106 | } 107 | } 108 | } 109 | 110 | return nil 111 | } 112 | 113 | func NewAckPresenter(w *os.File) Presenter { 114 | return &ackPresenter{w} 115 | } 116 | -------------------------------------------------------------------------------- /client/client.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "net/http" 7 | "net/url" 8 | "regexp" 9 | "strings" 10 | 11 | "github.com/itpp-labs/hound/config" 12 | "github.com/itpp-labs/hound/index" 13 | ) 14 | 15 | type Response struct { 16 | Results map[string]*index.SearchResponse 17 | Stats *struct { 18 | FilesOpened int 19 | ReposScanned int 20 | Duration int 21 | } `json:",omitempty"` 22 | } 23 | 24 | type Presenter interface { 25 | Present( 26 | re *regexp.Regexp, 27 | ctx int, 28 | repos map[string]*config.Repo, 29 | res *Response) error 30 | } 31 | 32 | type Config struct { 33 | HttpHeaders map[string]string `json:"http-headers"` 34 | Host string `json:"host"` 35 | } 36 | 37 | // Extract a repo name from the given url. 38 | func repoNameFromUrl(uri string) string { 39 | ax := strings.LastIndex(uri, "/") 40 | if ax < 0 { 41 | return "" 42 | } 43 | 44 | name := uri[ax+1:] 45 | if strings.HasSuffix(name, ".git") { 46 | name = name[:len(name)-4] 47 | } 48 | 49 | bx := strings.LastIndex(uri[:ax-1], "/") 50 | if bx < 0 { 51 | return name 52 | } 53 | 54 | return fmt.Sprintf("%s/%s", uri[bx+1:ax], name) 55 | } 56 | 57 | // Find the proper name for the given repo using the map of repo 58 | // information. 
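// If the repo is not in the map, or its URL does not yield a name, the raw repo key is returned unchanged.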
59 | func repoNameFor(repos map[string]*config.Repo, repo string) string { 60 | data := repos[repo] 61 | if data == nil { 62 | return repo 63 | } 64 | 65 | name := repoNameFromUrl(data.Url) 66 | if name == "" { 67 | return repo 68 | } 69 | 70 | return name 71 | } 72 | 73 | func doHttpGet(cfg *Config, uri string) (*http.Response, error) { 74 | req, err := http.NewRequest("GET", uri, nil) 75 | if err != nil { 76 | return nil, err 77 | } 78 | 79 | for key, val := range cfg.HttpHeaders { 80 | if strings.ToLower(key) == "host" { 81 | req.Host = val 82 | } else { 83 | req.Header.Set(key, val) 84 | } 85 | } 86 | 87 | var c http.Client 88 | return c.Do(req) 89 | } 90 | 91 | // Executes a search on the API running on host. 92 | func Search(r *Response, cfg *Config, pattern, repos, files string, context int, ignoreCase, stats bool) error { 93 | u := fmt.Sprintf("http://%s/api/v1/search?%s", 94 | cfg.Host, 95 | url.Values{ 96 | "q": {pattern}, 97 | "repos": {repos}, 98 | "files": {files}, 99 | "ctx": {fmt.Sprintf("%d", context)}, 100 | "i": {fmt.Sprintf("%t", ignoreCase)}, 101 | "stats": {fmt.Sprintf("%t", stats)}, 102 | }.Encode()) 103 | 104 | res, err := doHttpGet(cfg, u) 105 | if err != nil { 106 | return err 107 | } 108 | defer res.Body.Close() 109 | 110 | if res.StatusCode != http.StatusOK { 111 | return fmt.Errorf("Status %d", res.StatusCode) 112 | } 113 | 114 | return json.NewDecoder(res.Body).Decode(r) 115 | } 116 | 117 | // Load the list of repositories from the API running on host. 118 | func LoadRepos(repos map[string]*config.Repo, cfg *Config) error { 119 | res, err := doHttpGet(cfg, fmt.Sprintf("http://%s/api/v1/repos", cfg.Host)) 120 | if err != nil { 121 | return err 122 | } 123 | defer res.Body.Close() 124 | 125 | return json.NewDecoder(res.Body).Decode(&repos) 126 | } 127 | 128 | // Execute a search and load the list of repositories in parallel on the host. 129 | func SearchAndLoadRepos(cfg *Config, pattern, repos, files string, context int, ignoreCase, stats bool) (*Response, map[string]*config.Repo, error) { 130 | chs := make(chan error) 131 | var res Response 132 | go func() { 133 | chs <- Search(&res, cfg, pattern, repos, files, context, ignoreCase, stats) 134 | }() 135 | 136 | chr := make(chan error) 137 | rep := map[string]*config.Repo{} 138 | go func() { 139 | chr <- LoadRepos(rep, cfg) 140 | }() 141 | 142 | // must ready both channels before returning to avoid routine/channel leak. 
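// (Receiving from only one channel would leave the other goroutine blocked forever on its unbuffered send.)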
143 | errS, errR := <-chs, <-chr 144 | 145 | if errS != nil { 146 | return nil, nil, errS 147 | } 148 | 149 | if errR != nil { 150 | return nil, nil, errR 151 | } 152 | 153 | return &res, rep, nil 154 | } 155 | -------------------------------------------------------------------------------- /client/coalesce.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "github.com/itpp-labs/hound/index" 5 | ) 6 | 7 | type Block struct { 8 | Lines []string 9 | Matches []bool 10 | Start int 11 | } 12 | 13 | func endOfBlock(b *Block) int { 14 | return b.Start + len(b.Lines) - 1 15 | } 16 | 17 | func startOfMatch(m *index.Match) int { 18 | return m.LineNumber - len(m.Before) 19 | } 20 | 21 | func matchIsInBlock(m *index.Match, b *Block) bool { 22 | return startOfMatch(m) <= endOfBlock(b) 23 | } 24 | 25 | func matchToBlock(m *index.Match) *Block { 26 | b, a := len(m.Before), len(m.After) 27 | n := 1 + b + a 28 | l := make([]string, 0, n) 29 | v := make([]bool, n) 30 | 31 | v[b] = true 32 | 33 | for _, line := range m.Before { 34 | l = append(l, line) 35 | } 36 | 37 | l = append(l, m.Line) 38 | 39 | for _, line := range m.After { 40 | l = append(l, line) 41 | } 42 | 43 | return &Block{ 44 | Lines: l, 45 | Matches: v, 46 | Start: m.LineNumber - len(m.Before), 47 | } 48 | } 49 | 50 | func clampZero(n int) int { 51 | if n < 0 { 52 | return 0 53 | } 54 | return n 55 | } 56 | 57 | func mergeMatchIntoBlock(m *index.Match, b *Block) { 58 | off := endOfBlock(b) - startOfMatch(m) + 1 59 | idx := len(b.Lines) - off 60 | nb := len(m.Before) 61 | 62 | for i := off; i < nb; i++ { 63 | b.Lines = append(b.Lines, m.Before[i]) 64 | b.Matches = append(b.Matches, false) 65 | } 66 | 67 | if off < nb+1 { 68 | b.Lines = append(b.Lines, m.Line) 69 | b.Matches = append(b.Matches, true) 70 | } else { 71 | b.Matches[idx+nb] = true 72 | } 73 | 74 | for i, n := clampZero(off-nb-1), len(m.After); i < n; i++ { 75 | b.Lines = append(b.Lines, m.After[i]) 76 | b.Matches = append(b.Matches, false) 77 | } 78 | } 79 | 80 | func coalesceMatches(matches []*index.Match) []*Block { 81 | var res []*Block 82 | var curr *Block 83 | for _, match := range matches { 84 | if curr != nil && matchIsInBlock(match, curr) { 85 | mergeMatchIntoBlock(match, curr) 86 | } else { 87 | if curr != nil { 88 | res = append(res, curr) 89 | } 90 | curr = matchToBlock(match) 91 | } 92 | } 93 | 94 | if curr != nil { 95 | res = append(res, curr) 96 | } 97 | 98 | return res 99 | } 100 | -------------------------------------------------------------------------------- /client/coalesce_test.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/itpp-labs/hound/index" 7 | ) 8 | 9 | // TODO(knorton): 10 | // - Test multiple overlapping. 
11 | // - Test asymmetric context 12 | 13 | func stringSlicesAreSame(a, b []string) bool { 14 | if len(a) != len(b) { 15 | return false 16 | } 17 | 18 | for i, n := 0, len(a); i < n; i++ { 19 | if a[i] != b[i] { 20 | return false 21 | } 22 | } 23 | 24 | return true 25 | } 26 | 27 | func boolSlicesAreSame(a, b []bool) bool { 28 | if len(a) != len(b) { 29 | return false 30 | } 31 | 32 | for i, n := 0, len(a); i < n; i++ { 33 | if a[i] != b[i] { 34 | return false 35 | } 36 | } 37 | 38 | return true 39 | } 40 | 41 | func assertBlocksAreSame(t *testing.T, a, b *Block) bool { 42 | if !stringSlicesAreSame(a.Lines, b.Lines) { 43 | t.Errorf("bad lines: expected: %v, got: %v", a.Lines, b.Lines) 44 | return false 45 | } 46 | 47 | if !boolSlicesAreSame(a.Matches, b.Matches) { 48 | t.Errorf("bad matches: expected: %v, got: %v", a.Matches, b.Matches) 49 | return false 50 | } 51 | 52 | if a.Start != b.Start { 53 | t.Errorf("bad start: expected %d, got %d", a.Start, b.Start) 54 | return false 55 | } 56 | 57 | return true 58 | } 59 | 60 | func assertBlockSlicesAreSame(t *testing.T, a, b []*Block) bool { 61 | if len(a) != len(b) { 62 | t.Errorf("blocks do not match, len(a)=%d & len(b)=%d", len(a), len(b)) 63 | return false 64 | } 65 | 66 | for i, n := 0, len(a); i < n; i++ { 67 | if !assertBlocksAreSame(t, a[i], b[i]) { 68 | return false 69 | } 70 | } 71 | 72 | return true 73 | } 74 | 75 | func testThis(t *testing.T, subj []*index.Match, expt []*Block, desc string) { 76 | if !assertBlockSlicesAreSame(t, expt, coalesceMatches(subj)) { 77 | t.Errorf("case failed: %s", desc) 78 | } 79 | } 80 | 81 | func TestNonOverlap(t *testing.T) { 82 | subj := []*index.Match{ 83 | &index.Match{ 84 | Line: "c", 85 | LineNumber: 40, 86 | Before: []string{"a", "b"}, 87 | After: []string{"d", "e"}, 88 | }, 89 | &index.Match{ 90 | Line: "n", 91 | LineNumber: 50, 92 | Before: []string{"l", "m"}, 93 | After: []string{"o", "p"}, 94 | }, 95 | } 96 | 97 | expt := []*Block{ 98 | &Block{ 99 | Lines: []string{"a", "b", "c", "d", "e"}, 100 | Matches: []bool{false, false, true, false, false}, 101 | Start: 38, 102 | }, 103 | &Block{ 104 | Lines: []string{"l", "m", "n", "o", "p"}, 105 | Matches: []bool{false, false, true, false, false}, 106 | Start: 48, 107 | }, 108 | } 109 | 110 | testThis(t, subj, expt, 111 | "non-overlap w/ context") 112 | } 113 | func TestNonOverlapWithNoContext(t *testing.T) { 114 | subj := []*index.Match{ 115 | &index.Match{ 116 | Line: "a", 117 | LineNumber: 40, 118 | }, 119 | &index.Match{ 120 | Line: "b", 121 | LineNumber: 50, 122 | }, 123 | } 124 | 125 | expt := []*Block{ 126 | &Block{ 127 | Lines: []string{"a"}, 128 | Matches: []bool{true}, 129 | Start: 40, 130 | }, 131 | 132 | &Block{ 133 | Lines: []string{"b"}, 134 | Matches: []bool{true}, 135 | Start: 50, 136 | }, 137 | } 138 | 139 | testThis(t, subj, expt, 140 | "non-overlap w/o context") 141 | } 142 | 143 | func TestOverlappingInBefore(t *testing.T) { 144 | subj := []*index.Match{ 145 | &index.Match{ 146 | Line: "c", 147 | LineNumber: 40, 148 | Before: []string{"a", "b"}, 149 | After: []string{"d", "e"}, 150 | }, 151 | &index.Match{ 152 | Line: "g", 153 | LineNumber: 44, 154 | Before: []string{"e", "f"}, 155 | After: []string{"h", "i"}, 156 | }, 157 | } 158 | 159 | expt := []*Block{ 160 | &Block{ 161 | Lines: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i"}, 162 | Matches: []bool{false, false, true, false, false, false, true, false, false}, 163 | Start: 38, 164 | }, 165 | } 166 | 167 | testThis(t, subj, expt, 168 | "overlap in before") 169 | } 170 | 
func TestOverlappingInAfter(t *testing.T) { 171 | subj := []*index.Match{ 172 | &index.Match{ 173 | Line: "c", 174 | LineNumber: 40, 175 | Before: []string{"a", "b"}, 176 | After: []string{"d", "e"}, 177 | }, 178 | &index.Match{ 179 | Line: "d", 180 | LineNumber: 41, 181 | Before: []string{"b", "c"}, 182 | After: []string{"e", "f"}, 183 | }, 184 | } 185 | 186 | expt := []*Block{ 187 | &Block{ 188 | Lines: []string{"a", "b", "c", "d", "e", "f"}, 189 | Matches: []bool{false, false, true, true, false, false}, 190 | Start: 38, 191 | }, 192 | } 193 | 194 | testThis(t, subj, expt, 195 | "overlap in after") 196 | } 197 | 198 | func TestOverlapOnMatch(t *testing.T) { 199 | subj := []*index.Match{ 200 | &index.Match{ 201 | Line: "c", 202 | LineNumber: 40, 203 | Before: []string{"a", "b"}, 204 | After: []string{"d", "e"}, 205 | }, 206 | &index.Match{ 207 | Line: "e", 208 | LineNumber: 42, 209 | Before: []string{"c", "d"}, 210 | After: []string{"f", "g"}, 211 | }, 212 | } 213 | 214 | expt := []*Block{ 215 | &Block{ 216 | Lines: []string{"a", "b", "c", "d", "e", "f", "g"}, 217 | Matches: []bool{false, false, true, false, true, false, false}, 218 | Start: 38, 219 | }, 220 | } 221 | 222 | testThis(t, subj, expt, 223 | "overlap on match") 224 | } 225 | 226 | func TestMatchesToEnd(t *testing.T) { 227 | file := []string{ 228 | "import analytics.sequence._;", 229 | "import analytics._;", 230 | "println(\"Try running\")", 231 | "println(\"val visits = VisitExplorer(100)\");", 232 | } 233 | 234 | subj := []*index.Match{ 235 | &index.Match{ 236 | Line: file[2], 237 | LineNumber: 3, 238 | Before: []string{file[0], file[1]}, 239 | After: []string{file[3]}, 240 | }, 241 | 242 | &index.Match{ 243 | Line: file[3], 244 | LineNumber: 4, 245 | Before: []string{file[1], file[2]}, 246 | After: nil, 247 | }, 248 | } 249 | 250 | expt := []*Block{ 251 | &Block{ 252 | Lines: []string{file[0], file[1], file[2], file[3]}, 253 | Matches: []bool{false, false, true, true}, 254 | Start: 1, 255 | }, 256 | } 257 | 258 | testThis(t, subj, expt, 259 | "test matches at end of file") 260 | } 261 | -------------------------------------------------------------------------------- /client/grep.go: -------------------------------------------------------------------------------- 1 | package client 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "regexp" 7 | 8 | "github.com/itpp-labs/hound/ansi" 9 | "github.com/itpp-labs/hound/config" 10 | ) 11 | 12 | type grepPresenter struct { 13 | f *os.File 14 | } 15 | 16 | func (p *grepPresenter) Present( 17 | re *regexp.Regexp, 18 | ctx int, 19 | repos map[string]*config.Repo, 20 | res *Response) error { 21 | 22 | c := ansi.NewFor(p.f) 23 | 24 | if _, err := fmt.Fprintf(p.f, "%s\n", 25 | c.Fg("// TODO(knorton): Implement", ansi.Yellow, ansi.Bold)); err != nil { 26 | return err 27 | } 28 | 29 | return nil 30 | } 31 | 32 | func NewGrepPresenter(w *os.File) Presenter { 33 | return &grepPresenter{w} 34 | } 35 | -------------------------------------------------------------------------------- /cmds/hound/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "flag" 6 | "log" 7 | "os" 8 | "os/user" 9 | "regexp" 10 | 11 | "github.com/itpp-labs/hound/client" 12 | "github.com/itpp-labs/hound/index" 13 | ) 14 | 15 | // A uninitialized variable that can be defined during the build process with 16 | // -ldflags -X main.defaultHouse addr. This should remain uninitialized. 
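// For example (host is a placeholder): go build -ldflags "-X main.defaultHost=search.example.com:6080" github.com/itpp-labs/hound/cmds/hound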
17 | var defaultHost string 18 | 19 | // a convenience method for creating a new presenter that is either 20 | // ack-like or grep-like. 21 | func newPresenter(likeGrep bool) client.Presenter { 22 | if likeGrep { 23 | return client.NewGrepPresenter(os.Stdout) 24 | } 25 | 26 | return client.NewAckPresenter(os.Stdout) 27 | } 28 | 29 | // the paths we will attempt to load config from 30 | var configPaths = []string{ 31 | "/etc/hound.conf", 32 | "$HOME/.hound", 33 | } 34 | 35 | // Attempt to populate a client.Config from the json found in 36 | // filename. 37 | func loadConfigFrom(filename string, cfg *client.Config) error { 38 | r, err := os.Open(filename) 39 | if err != nil { 40 | return err 41 | } 42 | defer r.Close() 43 | 44 | return json.NewDecoder(r).Decode(cfg) 45 | } 46 | 47 | // Attempt to populate a client.Config from the json found in 48 | // any of the configPaths. 49 | func loadConfig(cfg *client.Config) error { 50 | u, err := user.Current() 51 | if err != nil { 52 | return err 53 | } 54 | 55 | env := map[string]string{ 56 | "HOME": u.HomeDir, 57 | } 58 | 59 | for _, path := range configPaths { 60 | err = loadConfigFrom(os.Expand(path, func(name string) string { 61 | return env[name] 62 | }), cfg) 63 | 64 | if os.IsNotExist(err) { 65 | continue 66 | } else if err != nil { 67 | return err 68 | } 69 | } 70 | 71 | return nil 72 | } 73 | 74 | // A simple way to determine what the default value should be 75 | // for the --host flag. 76 | func defaultFlagForHost() string { 77 | if defaultHost != "" { 78 | return defaultHost 79 | } 80 | return "localhost:6080" 81 | } 82 | 83 | func main() { 84 | flagHost := flag.String("host", defaultFlagForHost(), "") 85 | flagRepos := flag.String("repos", "*", "") 86 | flagFiles := flag.String("files", "", "") 87 | flagContext := flag.Int("context", 2, "") 88 | flagCase := flag.Bool("ignore-case", false, "") 89 | flagStats := flag.Bool("show-stats", false, "") 90 | flagGrep := flag.Bool("like-grep", false, "") 91 | 92 | flag.Parse() 93 | 94 | if flag.NArg() != 1 { 95 | flag.Usage() 96 | return 97 | } 98 | 99 | pat := index.GetRegexpPattern(flag.Arg(0), *flagCase) 100 | 101 | reg, err := regexp.Compile(pat) 102 | if err != nil { 103 | // TODO(knorton): Better error reporting 104 | log.Panic(err) 105 | } 106 | 107 | cfg := client.Config{ 108 | Host: *flagHost, 109 | HttpHeaders: nil, 110 | } 111 | 112 | if err := loadConfig(&cfg); err != nil { 113 | log.Panic(err) 114 | } 115 | 116 | res, repos, err := client.SearchAndLoadRepos(&cfg, 117 | flag.Arg(0), 118 | *flagRepos, 119 | *flagFiles, 120 | *flagContext, 121 | *flagCase, 122 | *flagStats) 123 | if err != nil { 124 | log.Panic(err) 125 | } 126 | 127 | if err := newPresenter(*flagGrep).Present(reg, *flagContext, repos, res); err != nil { 128 | log.Panic(err) 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /cmds/houndd/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "log" 6 | //"net/http" 7 | "os" 8 | "os/exec" 9 | "os/signal" 10 | "path/filepath" 11 | "runtime" 12 | "strings" 13 | "syscall" 14 | 15 | //"github.com/itpp-labs/hound/api" 16 | "github.com/itpp-labs/hound/config" 17 | "github.com/itpp-labs/hound/searcher" 18 | //"github.com/itpp-labs/hound/ui" 19 | "github.com/itpp-labs/hound/web" 20 | ) 21 | 22 | const gracefulShutdownSignal = syscall.SIGTERM 23 | 24 | var ( 25 | info_log *log.Logger 26 | error_log *log.Logger 27 | _, b, _, _ = runtime.Caller(0) 
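// b is the path of this source file as recorded at build time; basepath (its directory) is used below to locate the project root when spawning webpack-dev-server in --dev mode.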
28 | basepath = filepath.Dir(b) 29 | ) 30 | 31 | func makeSearchers(cfg *config.Config) (map[string]*searcher.Searcher, bool, error) { 32 | // Ensure we have a dbpath 33 | if _, err := os.Stat(cfg.DbPath); err != nil { 34 | if err := os.MkdirAll(cfg.DbPath, os.ModePerm); err != nil { 35 | return nil, false, err 36 | } 37 | } 38 | 39 | searchers, errs, err := searcher.MakeAll(cfg) 40 | if err != nil { 41 | return nil, false, err 42 | } 43 | 44 | if len(errs) > 0 { 45 | // NOTE: This mutates the original config so the repos 46 | // are not even seen by other code paths. 47 | n := 0 48 | for _, repo := range cfg.Repos { 49 | _, has_error := errs[repo.Name] 50 | if !has_error { 51 | cfg.Repos[n] = repo 52 | n++ 53 | } 54 | } 55 | cfg.Repos = cfg.Repos[:n] 56 | 57 | return searchers, false, nil 58 | } 59 | 60 | return searchers, true, nil 61 | } 62 | 63 | func handleShutdown(shutdownCh <-chan os.Signal, searchers map[string]*searcher.Searcher) { 64 | go func() { 65 | <-shutdownCh 66 | info_log.Printf("Graceful shutdown requested...") 67 | for _, s := range searchers { 68 | s.Stop() 69 | } 70 | 71 | for _, s := range searchers { 72 | s.Wait() 73 | } 74 | 75 | os.Exit(0) 76 | }() 77 | } 78 | 79 | func registerShutdownSignal() <-chan os.Signal { 80 | shutdownCh := make(chan os.Signal, 1) 81 | signal.Notify(shutdownCh, gracefulShutdownSignal) 82 | return shutdownCh 83 | } 84 | 85 | /* 86 | // TODO: if we need this method, it should be update according to new method specs 87 | func runHttp( 88 | addr string, 89 | dev bool, 90 | cfg *config.Config, 91 | idx map[string]*searcher.Searcher) error { 92 | m := http.DefaultServeMux 93 | 94 | h, err := ui.Content(dev, cfg) 95 | if err != nil { 96 | return err 97 | } 98 | 99 | m.Handle("/", h) 100 | api.Setup(m, idx) 101 | return http.ListenAndServe(addr, m) 102 | } 103 | */ 104 | 105 | func main() { 106 | runtime.GOMAXPROCS(runtime.NumCPU()) 107 | info_log = log.New(os.Stdout, "", log.LstdFlags) 108 | error_log = log.New(os.Stderr, "", log.LstdFlags) 109 | 110 | flagConf := flag.String("conf", "config.json", "") 111 | flagAddr := flag.String("addr", ":6080", "") 112 | flagDev := flag.Bool("dev", false, "") 113 | 114 | flag.Parse() 115 | 116 | var cfg config.Config 117 | if err := cfg.LoadFromFile(*flagConf); err != nil { 118 | panic(err) 119 | } 120 | 121 | // Start the web server on a background routine. 122 | ws := web.Start(&cfg, *flagAddr, *flagDev) 123 | 124 | // It's not safe to be killed during makeSearchers, so register the 125 | // shutdown signal here and defer processing it until we are ready. 
126 | shutdownCh := registerShutdownSignal() 127 | idx, ok, err := makeSearchers(&cfg) 128 | if err != nil { 129 | log.Panic(err) 130 | } 131 | if !ok { 132 | info_log.Println("Some repos failed to index, see output above") 133 | } else { 134 | info_log.Println("All indexes built!") 135 | } 136 | 137 | handleShutdown(shutdownCh, idx) 138 | 139 | host := *flagAddr 140 | if strings.HasPrefix(host, ":") { 141 | host = "localhost" + host 142 | } 143 | 144 | if *flagDev { 145 | info_log.Printf("[DEV] starting webpack-dev-server at localhost:9000...") 146 | webpack := exec.Command("./node_modules/.bin/webpack-dev-server", "--mode", "development") 147 | webpack.Dir = basepath + "/../../" 148 | webpack.Stdout = os.Stdout 149 | webpack.Stderr = os.Stderr 150 | err = webpack.Start() 151 | if err != nil { 152 | error_log.Println(err) 153 | } 154 | } 155 | 156 | info_log.Printf("running server at http://%s\n", host) 157 | 158 | // Fully enable the web server now that we have indexes 159 | panic(ws.ServeWithIndex(idx)) 160 | } 161 | -------------------------------------------------------------------------------- /codesearch/AUTHORS: -------------------------------------------------------------------------------- 1 | # This source code is copyright "The Go Authors", 2 | # as defined by the AUTHORS file in the root of the Go tree. 3 | # 4 | # http://tip.golang.org/AUTHORS. 5 | -------------------------------------------------------------------------------- /codesearch/CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | # The official list of people who can contribute code to the repository 2 | # is maintained in the standard Go repository as the CONTRIBUTORS 3 | # file in the root of the Go tree. 4 | # 5 | # http://tip.golang.org/CONTRIBUTORS 6 | -------------------------------------------------------------------------------- /codesearch/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011 The Go Authors. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above 10 | copyright notice, this list of conditions and the following disclaimer 11 | in the documentation and/or other materials provided with the 12 | distribution. 13 | * Neither the name of Google Inc. nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 | A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 21 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /codesearch/README: -------------------------------------------------------------------------------- 1 | Code Search is a tool for indexing and then performing 2 | regular expression searches over large bodies of source code. 3 | It is a set of command-line programs written in Go. 4 | 5 | For background and an overview of the commands, 6 | see http://swtch.com/~rsc/regexp/regexp4.html. 7 | 8 | To install: 9 | 10 | go get code.google.com/p/codesearch/cmd/... 11 | 12 | Russ Cox 13 | rsc@swtch.com 14 | January 2012 15 | -------------------------------------------------------------------------------- /codesearch/index/merge.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | // Merging indexes. 8 | // 9 | // To merge two indexes A and B (newer) into a combined index C: 10 | // 11 | // Load the path list from B and determine for each path the docid ranges 12 | // that it will replace in A. 13 | // 14 | // Read A's and B's name lists together, merging them into C's name list. 15 | // Discard the identified ranges from A during the merge. Also during the merge, 16 | // record the mapping from A's docids to C's docids, and also the mapping from 17 | // B's docids to C's docids. Both mappings can be summarized in a table like 18 | // 19 | // 10-14 map to 20-24 20 | // 15-24 is deleted 21 | // 25-34 maps to 40-49 22 | // 23 | // The number of ranges will be at most the combined number of paths. 24 | // Also during the merge, write the name index to a temporary file as usual. 25 | // 26 | // Now merge the posting lists (this is why they begin with the trigram). 27 | // During the merge, translate the docid numbers to the new C docid space. 28 | // Also during the merge, write the posting list index to a temporary file as usual. 29 | // 30 | // Copy the name index and posting list index into C's index and write the trailer. 31 | // Rename C's index onto the new index. 32 | 33 | import ( 34 | "encoding/binary" 35 | "os" 36 | "strings" 37 | ) 38 | 39 | // An idrange records that the half-open interval [lo, hi) maps to [new, new+hi-lo). 40 | type idrange struct { 41 | lo, hi, new uint32 42 | } 43 | 44 | type postIndex struct { 45 | tri uint32 46 | count uint32 47 | offset uint32 48 | } 49 | 50 | // Merge creates a new index in the file dst that corresponds to merging 51 | // the two indices src1 and src2. If both src1 and src2 claim responsibility 52 | // for a path, src2 is assumed to be newer and is given preference. 53 | func Merge(dst, src1, src2 string) { 54 | ix1 := Open(src1) 55 | ix2 := Open(src2) 56 | paths1 := ix1.Paths() 57 | paths2 := ix2.Paths() 58 | 59 | // Build docid maps. 
60 | var i1, i2, new uint32 61 | var map1, map2 []idrange 62 | for _, path := range paths2 { 63 | // Determine range shadowed by this path. 64 | old := i1 65 | for i1 < uint32(ix1.numName) && ix1.Name(i1) < path { 66 | i1++ 67 | } 68 | lo := i1 69 | limit := path[:len(path)-1] + string(path[len(path)-1]+1) 70 | for i1 < uint32(ix1.numName) && ix1.Name(i1) < limit { 71 | i1++ 72 | } 73 | hi := i1 74 | 75 | // Record range before the shadow. 76 | if old < lo { 77 | map1 = append(map1, idrange{old, lo, new}) 78 | new += lo - old 79 | } 80 | 81 | // Determine range defined by this path. 82 | // Because we are iterating over the ix2 paths, 83 | // there can't be gaps, so it must start at i2. 84 | if i2 < uint32(ix2.numName) && ix2.Name(i2) < path { 85 | panic("merge: inconsistent index") 86 | } 87 | lo = i2 88 | for i2 < uint32(ix2.numName) && ix2.Name(i2) < limit { 89 | i2++ 90 | } 91 | hi = i2 92 | if lo < hi { 93 | map2 = append(map2, idrange{lo, hi, new}) 94 | new += hi - lo 95 | } 96 | } 97 | 98 | if i1 < uint32(ix1.numName) { 99 | map1 = append(map1, idrange{i1, uint32(ix1.numName), new}) 100 | new += uint32(ix1.numName) - i1 101 | } 102 | if i2 < uint32(ix2.numName) { 103 | panic("merge: inconsistent index") 104 | } 105 | numName := new 106 | 107 | ix3 := bufCreate(dst) 108 | ix3.writeString(magic) 109 | 110 | // Merged list of paths. 111 | pathData := ix3.offset() 112 | mi1 := 0 113 | mi2 := 0 114 | last := "\x00" // not a prefix of anything 115 | for mi1 < len(paths1) || mi2 < len(paths2) { 116 | var p string 117 | if mi2 >= len(paths2) || mi1 < len(paths1) && paths1[mi1] <= paths2[mi2] { 118 | p = paths1[mi1] 119 | mi1++ 120 | } else { 121 | p = paths2[mi2] 122 | mi2++ 123 | } 124 | if strings.HasPrefix(p, last) { 125 | continue 126 | } 127 | last = p 128 | ix3.writeString(p) 129 | ix3.writeString("\x00") 130 | } 131 | ix3.writeString("\x00") 132 | 133 | // Merged list of names. 134 | nameData := ix3.offset() 135 | nameIndexFile := bufCreate("") 136 | new = 0 137 | mi1 = 0 138 | mi2 = 0 139 | for new < numName { 140 | if mi1 < len(map1) && map1[mi1].new == new { 141 | for i := map1[mi1].lo; i < map1[mi1].hi; i++ { 142 | name := ix1.Name(i) 143 | nameIndexFile.writeUint32(ix3.offset() - nameData) 144 | ix3.writeString(name) 145 | ix3.writeString("\x00") 146 | new++ 147 | } 148 | mi1++ 149 | } else if mi2 < len(map2) && map2[mi2].new == new { 150 | for i := map2[mi2].lo; i < map2[mi2].hi; i++ { 151 | name := ix2.Name(i) 152 | nameIndexFile.writeUint32(ix3.offset() - nameData) 153 | ix3.writeString(name) 154 | ix3.writeString("\x00") 155 | new++ 156 | } 157 | mi2++ 158 | } else { 159 | panic("merge: inconsistent index") 160 | } 161 | } 162 | if new*4 != nameIndexFile.offset() { 163 | panic("merge: inconsistent index") 164 | } 165 | nameIndexFile.writeUint32(ix3.offset()) 166 | 167 | // Merged list of posting lists. 
168 | postData := ix3.offset() 169 | var r1 postMapReader 170 | var r2 postMapReader 171 | var w postDataWriter 172 | r1.init(ix1, map1) 173 | r2.init(ix2, map2) 174 | w.init(ix3) 175 | for { 176 | if r1.trigram < r2.trigram { 177 | w.trigram(r1.trigram) 178 | for r1.nextId() { 179 | w.fileid(r1.fileid) 180 | } 181 | r1.nextTrigram() 182 | w.endTrigram() 183 | } else if r2.trigram < r1.trigram { 184 | w.trigram(r2.trigram) 185 | for r2.nextId() { 186 | w.fileid(r2.fileid) 187 | } 188 | r2.nextTrigram() 189 | w.endTrigram() 190 | } else { 191 | if r1.trigram == ^uint32(0) { 192 | break 193 | } 194 | w.trigram(r1.trigram) 195 | r1.nextId() 196 | r2.nextId() 197 | for r1.fileid < ^uint32(0) || r2.fileid < ^uint32(0) { 198 | if r1.fileid < r2.fileid { 199 | w.fileid(r1.fileid) 200 | r1.nextId() 201 | } else if r2.fileid < r1.fileid { 202 | w.fileid(r2.fileid) 203 | r2.nextId() 204 | } else { 205 | panic("merge: inconsistent index") 206 | } 207 | } 208 | r1.nextTrigram() 209 | r2.nextTrigram() 210 | w.endTrigram() 211 | } 212 | } 213 | 214 | // Name index 215 | nameIndex := ix3.offset() 216 | copyFile(ix3, nameIndexFile) 217 | 218 | // Posting list index 219 | postIndex := ix3.offset() 220 | copyFile(ix3, w.postIndexFile) 221 | 222 | ix3.writeUint32(pathData) 223 | ix3.writeUint32(nameData) 224 | ix3.writeUint32(postData) 225 | ix3.writeUint32(nameIndex) 226 | ix3.writeUint32(postIndex) 227 | ix3.writeString(trailerMagic) 228 | ix3.flush() 229 | 230 | os.Remove(nameIndexFile.name) 231 | os.Remove(w.postIndexFile.name) 232 | } 233 | 234 | type postMapReader struct { 235 | ix *Index 236 | idmap []idrange 237 | triNum uint32 238 | trigram uint32 239 | count uint32 240 | offset uint32 241 | d []byte 242 | oldid uint32 243 | fileid uint32 244 | i int 245 | } 246 | 247 | func (r *postMapReader) init(ix *Index, idmap []idrange) { 248 | r.ix = ix 249 | r.idmap = idmap 250 | r.trigram = ^uint32(0) 251 | r.load() 252 | } 253 | 254 | func (r *postMapReader) nextTrigram() { 255 | r.triNum++ 256 | r.load() 257 | } 258 | 259 | func (r *postMapReader) load() { 260 | if r.triNum >= uint32(r.ix.numPost) { 261 | r.trigram = ^uint32(0) 262 | r.count = 0 263 | r.fileid = ^uint32(0) 264 | return 265 | } 266 | r.trigram, r.count, r.offset = r.ix.listAt(r.triNum * postEntrySize) 267 | if r.count == 0 { 268 | r.fileid = ^uint32(0) 269 | return 270 | } 271 | r.d = r.ix.slice(r.ix.postData+r.offset+3, -1) 272 | r.oldid = ^uint32(0) 273 | r.i = 0 274 | } 275 | 276 | func (r *postMapReader) nextId() bool { 277 | for r.count > 0 { 278 | r.count-- 279 | delta64, n := binary.Uvarint(r.d) 280 | delta := uint32(delta64) 281 | if n <= 0 || delta == 0 { 282 | corrupt(r.ix.data.f) 283 | } 284 | r.d = r.d[n:] 285 | r.oldid += delta 286 | for r.i < len(r.idmap) && r.idmap[r.i].hi <= r.oldid { 287 | r.i++ 288 | } 289 | if r.i >= len(r.idmap) { 290 | r.count = 0 291 | break 292 | } 293 | if r.oldid < r.idmap[r.i].lo { 294 | continue 295 | } 296 | r.fileid = r.idmap[r.i].new + r.oldid - r.idmap[r.i].lo 297 | return true 298 | } 299 | 300 | r.fileid = ^uint32(0) 301 | return false 302 | } 303 | 304 | type postDataWriter struct { 305 | out *bufWriter 306 | postIndexFile *bufWriter 307 | buf [10]byte 308 | base uint32 309 | count, offset uint32 310 | last uint32 311 | t uint32 312 | } 313 | 314 | func (w *postDataWriter) init(out *bufWriter) { 315 | w.out = out 316 | w.postIndexFile = bufCreate("") 317 | w.base = out.offset() 318 | } 319 | 320 | func (w *postDataWriter) trigram(t uint32) { 321 | w.offset = w.out.offset() 322 | w.count = 
0 323 | w.t = t 324 | w.last = ^uint32(0) 325 | } 326 | 327 | func (w *postDataWriter) fileid(id uint32) { 328 | if w.count == 0 { 329 | w.out.writeTrigram(w.t) 330 | } 331 | w.out.writeUvarint(id - w.last) 332 | w.last = id 333 | w.count++ 334 | } 335 | 336 | func (w *postDataWriter) endTrigram() { 337 | if w.count == 0 { 338 | return 339 | } 340 | w.out.writeUvarint(0) 341 | w.postIndexFile.writeTrigram(w.t) 342 | w.postIndexFile.writeUint32(w.count) 343 | w.postIndexFile.writeUint32(w.offset - w.base) 344 | } 345 | -------------------------------------------------------------------------------- /codesearch/index/merge_test.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | import ( 8 | "io/ioutil" 9 | "os" 10 | "testing" 11 | ) 12 | 13 | var mergePaths1 = []string{ 14 | "/a", 15 | "/b", 16 | "/c", 17 | } 18 | 19 | var mergePaths2 = []string{ 20 | "/b", 21 | "/cc", 22 | } 23 | 24 | var mergeFiles1 = map[string]string{ 25 | "/a/x": "hello world", 26 | "/a/y": "goodbye world", 27 | "/b/xx": "now is the time", 28 | "/b/xy": "for all good men", 29 | "/c/ab": "give me all the potatoes", 30 | "/c/de": "or give me death now", 31 | } 32 | 33 | var mergeFiles2 = map[string]string{ 34 | "/b/www": "world wide indeed", 35 | "/b/xx": "no, not now", 36 | "/b/yy": "first potatoes, now liberty?", 37 | "/cc": "come to the aid of his potatoes", 38 | } 39 | 40 | func TestMerge(t *testing.T) { 41 | f1, _ := ioutil.TempFile("", "index-test") 42 | f2, _ := ioutil.TempFile("", "index-test") 43 | f3, _ := ioutil.TempFile("", "index-test") 44 | defer os.Remove(f1.Name()) 45 | defer os.Remove(f2.Name()) 46 | defer os.Remove(f3.Name()) 47 | 48 | out1 := f1.Name() 49 | out2 := f2.Name() 50 | out3 := f3.Name() 51 | 52 | buildIndex(out1, mergePaths1, mergeFiles1) 53 | buildIndex(out2, mergePaths2, mergeFiles2) 54 | 55 | Merge(out3, out1, out2) 56 | 57 | ix1 := Open(out1) 58 | ix2 := Open(out2) 59 | ix3 := Open(out3) 60 | 61 | nameof := func(ix *Index) string { 62 | switch { 63 | case ix == ix1: 64 | return "ix1" 65 | case ix == ix2: 66 | return "ix2" 67 | case ix == ix3: 68 | return "ix3" 69 | } 70 | return "???" 71 | } 72 | 73 | checkFiles := func(ix *Index, l ...string) { 74 | for i, s := range l { 75 | if n := ix.Name(uint32(i)); n != s { 76 | t.Errorf("%s: Name(%d) = %s, want %s", nameof(ix), i, n, s) 77 | } 78 | } 79 | } 80 | 81 | checkFiles(ix1, "/a/x", "/a/y", "/b/xx", "/b/xy", "/c/ab", "/c/de") 82 | checkFiles(ix2, "/b/www", "/b/xx", "/b/yy", "/cc") 83 | checkFiles(ix3, "/a/x", "/a/y", "/b/www", "/b/xx", "/b/yy", "/c/ab", "/c/de", "/cc") 84 | 85 | check := func(ix *Index, trig string, l ...uint32) { 86 | l1 := ix.PostingList(tri(trig[0], trig[1], trig[2])) 87 | if !equalList(l1, l) { 88 | t.Errorf("PostingList(%s, %s) = %v, want %v", nameof(ix), trig, l1, l) 89 | } 90 | } 91 | 92 | check(ix1, "wor", 0, 1) 93 | check(ix1, "now", 2, 5) 94 | check(ix1, "all", 3, 4) 95 | 96 | check(ix2, "now", 1, 2) 97 | 98 | check(ix3, "all", 5) 99 | check(ix3, "wor", 0, 1, 2) 100 | check(ix3, "now", 3, 4, 6) 101 | check(ix3, "pot", 4, 5, 7) 102 | } 103 | -------------------------------------------------------------------------------- /codesearch/index/mmap_bsd.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. 
All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | // +build darwin freebsd openbsd netbsd 6 | 7 | package index 8 | 9 | import ( 10 | "log" 11 | "os" 12 | "syscall" 13 | ) 14 | 15 | func mmapFile(f *os.File) mmapData { 16 | st, err := f.Stat() 17 | if err != nil { 18 | log.Fatal(err) 19 | } 20 | size := st.Size() 21 | if int64(int(size+4095)) != size+4095 { 22 | log.Fatalf("%s: too large for mmap", f.Name()) 23 | } 24 | n := int(size) 25 | if n == 0 { 26 | return mmapData{f, nil, nil} 27 | } 28 | data, err := syscall.Mmap(int(f.Fd()), 0, (n+4095)&^4095, syscall.PROT_READ, syscall.MAP_PRIVATE) 29 | if err != nil { 30 | log.Fatalf("mmap %s: %v", f.Name(), err) 31 | } 32 | return mmapData{f, data[:n], data} 33 | } 34 | 35 | func unmmapFile(m *mmapData) error { 36 | if err := syscall.Munmap(m.o); err != nil { 37 | return err 38 | } 39 | 40 | return m.f.Close() 41 | } 42 | 43 | func unmmap(d []byte) error { 44 | return syscall.Munmap(d) 45 | } 46 | -------------------------------------------------------------------------------- /codesearch/index/mmap_linux.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | import ( 8 | "log" 9 | "os" 10 | "syscall" 11 | ) 12 | 13 | func mmapFile(f *os.File) mmapData { 14 | st, err := f.Stat() 15 | if err != nil { 16 | log.Fatal(err) 17 | } 18 | size := st.Size() 19 | if int64(int(size+4095)) != size+4095 { 20 | log.Fatalf("%s: too large for mmap", f.Name()) 21 | } 22 | n := int(size) 23 | if n == 0 { 24 | return mmapData{f, nil, nil} 25 | } 26 | data, err := syscall.Mmap(int(f.Fd()), 0, (n+4095)&^4095, syscall.PROT_READ, syscall.MAP_SHARED) 27 | if err != nil { 28 | log.Fatalf("mmap %s: %v", f.Name(), err) 29 | } 30 | return mmapData{f, data[:n], data} 31 | } 32 | 33 | func unmmapFile(m *mmapData) error { 34 | if err := syscall.Munmap(m.o); err != nil { 35 | return err 36 | } 37 | 38 | return m.f.Close() 39 | } 40 | 41 | func unmmap(d []byte) error { 42 | return syscall.Munmap(d) 43 | } 44 | -------------------------------------------------------------------------------- /codesearch/index/mmap_windows.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 
4 | 5 | package index 6 | 7 | import ( 8 | "log" 9 | "os" 10 | "syscall" 11 | "unsafe" 12 | ) 13 | 14 | func mmapFile(f *os.File) mmapData { 15 | st, err := f.Stat() 16 | if err != nil { 17 | log.Fatal(err) 18 | } 19 | size := st.Size() 20 | if int64(int(size+4095)) != size+4095 { 21 | log.Fatalf("%s: too large for mmap", f.Name()) 22 | } 23 | if size == 0 { 24 | return mmapData{f, nil, nil} 25 | } 26 | h, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, uint32(size>>32), uint32(size), nil) 27 | if err != nil { 28 | log.Fatalf("CreateFileMapping %s: %v", f.Name(), err) 29 | } 30 | defer syscall.CloseHandle(syscall.Handle(h)) 31 | 32 | addr, err := syscall.MapViewOfFile(h, syscall.FILE_MAP_READ, 0, 0, 0) 33 | if err != nil { 34 | log.Fatalf("MapViewOfFile %s: %v", f.Name(), err) 35 | } 36 | 37 | data := (*[1 << 30]byte)(unsafe.Pointer(addr)) 38 | return mmapData{f, data[:size], data[:]} 39 | } 40 | 41 | func unmmapFile(m *mmapData) error { 42 | err := syscall.UnmapViewOfFile(uintptr(unsafe.Pointer(&m.d[0]))) 43 | if err != nil { 44 | return err 45 | } 46 | 47 | return m.f.Close() 48 | } 49 | 50 | func unmmap(d []byte) error { 51 | return syscall.UnmapViewOfFile(uintptr(unsafe.Pointer(&d))) 52 | } -------------------------------------------------------------------------------- /codesearch/index/read_test.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | import ( 8 | "io/ioutil" 9 | "os" 10 | "testing" 11 | ) 12 | 13 | var postFiles = map[string]string{ 14 | "file0": "", 15 | "file1": "Google Code Search", 16 | "file2": "Google Code Project Hosting", 17 | "file3": "Google Web Search", 18 | } 19 | 20 | func tri(x, y, z byte) uint32 { 21 | return uint32(x)<<16 | uint32(y)<<8 | uint32(z) 22 | } 23 | 24 | func TestTrivialPosting(t *testing.T) { 25 | f, _ := ioutil.TempFile("", "index-test") 26 | defer os.Remove(f.Name()) 27 | out := f.Name() 28 | buildIndex(out, nil, postFiles) 29 | ix := Open(out) 30 | if l := ix.PostingList(tri('S', 'e', 'a')); !equalList(l, []uint32{1, 3}) { 31 | t.Errorf("PostingList(Sea) = %v, want [1 3]", l) 32 | } 33 | if l := ix.PostingList(tri('G', 'o', 'o')); !equalList(l, []uint32{1, 2, 3}) { 34 | t.Errorf("PostingList(Goo) = %v, want [1 2 3]", l) 35 | } 36 | if l := ix.PostingAnd(ix.PostingList(tri('S', 'e', 'a')), tri('G', 'o', 'o')); !equalList(l, []uint32{1, 3}) { 37 | t.Errorf("PostingList(Sea&Goo) = %v, want [1 3]", l) 38 | } 39 | if l := ix.PostingAnd(ix.PostingList(tri('G', 'o', 'o')), tri('S', 'e', 'a')); !equalList(l, []uint32{1, 3}) { 40 | t.Errorf("PostingList(Goo&Sea) = %v, want [1 3]", l) 41 | } 42 | if l := ix.PostingOr(ix.PostingList(tri('S', 'e', 'a')), tri('G', 'o', 'o')); !equalList(l, []uint32{1, 2, 3}) { 43 | t.Errorf("PostingList(Sea|Goo) = %v, want [1 2 3]", l) 44 | } 45 | if l := ix.PostingOr(ix.PostingList(tri('G', 'o', 'o')), tri('S', 'e', 'a')); !equalList(l, []uint32{1, 2, 3}) { 46 | t.Errorf("PostingList(Goo|Sea) = %v, want [1 2 3]", l) 47 | } 48 | } 49 | 50 | func equalList(x, y []uint32) bool { 51 | if len(x) != len(y) { 52 | return false 53 | } 54 | for i, xi := range x { 55 | if xi != y[i] { 56 | return false 57 | } 58 | } 59 | return true 60 | } 61 | -------------------------------------------------------------------------------- /codesearch/index/regexp_test.go: 
-------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | import ( 8 | "regexp/syntax" 9 | "testing" 10 | ) 11 | 12 | var queryTests = []struct { 13 | re string 14 | q string 15 | }{ 16 | {`Abcdef`, `"Abc" "bcd" "cde" "def"`}, 17 | {`(abc)(def)`, `"abc" "bcd" "cde" "def"`}, 18 | {`abc.*(def|ghi)`, `"abc" ("def"|"ghi")`}, 19 | {`abc(def|ghi)`, `"abc" ("bcd" "cde" "def")|("bcg" "cgh" "ghi")`}, 20 | {`a+hello`, `"ahe" "ell" "hel" "llo"`}, 21 | {`(a+hello|b+world)`, `("ahe" "ell" "hel" "llo")|("bwo" "orl" "rld" "wor")`}, 22 | {`a*bbb`, `"bbb"`}, 23 | {`a?bbb`, `"bbb"`}, 24 | {`(bbb)a?`, `"bbb"`}, 25 | {`(bbb)a*`, `"bbb"`}, 26 | {`^abc`, `"abc"`}, 27 | {`abc$`, `"abc"`}, 28 | {`ab[cde]f`, `("abc" "bcf")|("abd" "bdf")|("abe" "bef")`}, 29 | {`(abc|bac)de`, `"cde" ("abc" "bcd")|("acd" "bac")`}, 30 | 31 | // These don't have enough letters for a trigram, so they return the 32 | // always matching query "+". 33 | {`ab[^cde]f`, `+`}, 34 | {`ab.f`, `+`}, 35 | {`.`, `+`}, 36 | {`()`, `+`}, 37 | 38 | // No matches. 39 | {`[^\s\S]`, `-`}, 40 | 41 | // Factoring works. 42 | {`(abc|abc)`, `"abc"`}, 43 | {`(ab|ab)c`, `"abc"`}, 44 | {`ab(cab|cat)`, `"abc" "bca" ("cab"|"cat")`}, 45 | {`(z*(abc|def)z*)(z*(abc|def)z*)`, `("abc"|"def")`}, 46 | {`(z*abcz*defz*)|(z*abcz*defz*)`, `"abc" "def"`}, 47 | {`(z*abcz*defz*(ghi|jkl)z*)|(z*abcz*defz*(mno|prs)z*)`, 48 | `"abc" "def" ("ghi"|"jkl"|"mno"|"prs")`}, 49 | {`(z*(abcz*def)|(ghiz*jkl)z*)|(z*(mnoz*prs)|(tuvz*wxy)z*)`, 50 | `("abc" "def")|("ghi" "jkl")|("mno" "prs")|("tuv" "wxy")`}, 51 | {`(z*abcz*defz*)(z*(ghi|jkl)z*)`, `"abc" "def" ("ghi"|"jkl")`}, 52 | {`(z*abcz*defz*)|(z*(ghi|jkl)z*)`, `("ghi"|"jkl")|("abc" "def")`}, 53 | 54 | // analyze keeps track of multiple possible prefix/suffixes. 55 | {`[ab][cd][ef]`, `("ace"|"acf"|"ade"|"adf"|"bce"|"bcf"|"bde"|"bdf")`}, 56 | {`ab[cd]e`, `("abc" "bce")|("abd" "bde")`}, 57 | 58 | // Different sized suffixes. 59 | {`(a|ab)cde`, `"cde" ("abc" "bcd")|("acd")`}, 60 | {`(a|b|c|d)(ef|g|hi|j)`, `+`}, 61 | 62 | {`(?s).`, `+`}, 63 | 64 | // Expanding case. 65 | {`(?i)a~~`, `("A~~"|"a~~")`}, 66 | {`(?i)ab~`, `("AB~"|"Ab~"|"aB~"|"ab~")`}, 67 | {`(?i)abc`, `("ABC"|"ABc"|"AbC"|"Abc"|"aBC"|"aBc"|"abC"|"abc")`}, 68 | {`(?i)abc|def`, `("ABC"|"ABc"|"AbC"|"Abc"|"DEF"|"DEf"|"DeF"|"Def"|"aBC"|"aBc"|"abC"|"abc"|"dEF"|"dEf"|"deF"|"def")`}, 69 | {`(?i)abcd`, `("ABC"|"ABc"|"AbC"|"Abc"|"aBC"|"aBc"|"abC"|"abc") ("BCD"|"BCd"|"BcD"|"Bcd"|"bCD"|"bCd"|"bcD"|"bcd")`}, 70 | {`(?i)abc|abc`, `("ABC"|"ABc"|"AbC"|"Abc"|"aBC"|"aBc"|"abC"|"abc")`}, 71 | 72 | // Word boundary. 73 | {`\b`, `+`}, 74 | {`\B`, `+`}, 75 | {`\babc`, `"abc"`}, 76 | {`\Babc`, `"abc"`}, 77 | {`abc\b`, `"abc"`}, 78 | {`abc\B`, `"abc"`}, 79 | {`ab\bc`, `"abc"`}, 80 | {`ab\Bc`, `"abc"`}, 81 | } 82 | 83 | func TestQuery(t *testing.T) { 84 | for _, tt := range queryTests { 85 | re, err := syntax.Parse(tt.re, syntax.Perl) 86 | if err != nil { 87 | t.Fatal(err) 88 | } 89 | q := RegexpQuery(re).String() 90 | if q != tt.q { 91 | t.Errorf("RegexpQuery(%#q) = %#q, want %#q", tt.re, q, tt.q) 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /codesearch/index/write_test.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 
2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package index 6 | 7 | import ( 8 | "bytes" 9 | "io/ioutil" 10 | "os" 11 | "sort" 12 | "strings" 13 | "testing" 14 | ) 15 | 16 | var trivialFiles = map[string]string{ 17 | "f0": "\n\n", 18 | "file1": "\na\n", 19 | "thefile2": "\nab\n", 20 | "file3": "\nabc\n", 21 | "afile4": "\ndabc\n", 22 | "file5": "\nxyzw\n", 23 | } 24 | 25 | var trivialIndex = join( 26 | // header 27 | "csearch index 1\n", 28 | 29 | // list of paths 30 | "\x00", 31 | 32 | // list of names 33 | "afile4\x00", 34 | "f0\x00", 35 | "file1\x00", 36 | "file3\x00", 37 | "file5\x00", 38 | "thefile2\x00", 39 | "\x00", 40 | 41 | // list of posting lists 42 | "\na\n", fileList(2), // file1 43 | "\nab", fileList(3, 5), // file3, thefile2 44 | "\nda", fileList(0), // afile4 45 | "\nxy", fileList(4), // file5 46 | "ab\n", fileList(5), // thefile2 47 | "abc", fileList(0, 3), // afile4, file3 48 | "bc\n", fileList(0, 3), // afile4, file3 49 | "dab", fileList(0), // afile4 50 | "xyz", fileList(4), // file5 51 | "yzw", fileList(4), // file5 52 | "zw\n", fileList(4), // file5 53 | "\xff\xff\xff", fileList(), 54 | 55 | // name index 56 | u32(0), 57 | u32(6+1), 58 | u32(6+1+2+1), 59 | u32(6+1+2+1+5+1), 60 | u32(6+1+2+1+5+1+5+1), 61 | u32(6+1+2+1+5+1+5+1+5+1), 62 | u32(6+1+2+1+5+1+5+1+5+1+8+1), 63 | 64 | // posting list index, 65 | "\na\n", u32(1), u32(0), 66 | "\nab", u32(2), u32(5), 67 | "\nda", u32(1), u32(5+6), 68 | "\nxy", u32(1), u32(5+6+5), 69 | "ab\n", u32(1), u32(5+6+5+5), 70 | "abc", u32(2), u32(5+6+5+5+5), 71 | "bc\n", u32(2), u32(5+6+5+5+5+6), 72 | "dab", u32(1), u32(5+6+5+5+5+6+6), 73 | "xyz", u32(1), u32(5+6+5+5+5+6+6+5), 74 | "yzw", u32(1), u32(5+6+5+5+5+6+6+5+5), 75 | "zw\n", u32(1), u32(5+6+5+5+5+6+6+5+5+5), 76 | "\xff\xff\xff", u32(0), u32(5+6+5+5+5+6+6+5+5+5+5), 77 | 78 | // trailer 79 | u32(16), 80 | u32(16+1), 81 | u32(16+1+38), 82 | u32(16+1+38+62), 83 | u32(16+1+38+62+28), 84 | 85 | "\ncsearch trailr\n", 86 | ) 87 | 88 | func join(s ...string) string { 89 | return strings.Join(s, "") 90 | } 91 | 92 | func u32(x uint32) string { 93 | var buf [4]byte 94 | buf[0] = byte(x >> 24) 95 | buf[1] = byte(x >> 16) 96 | buf[2] = byte(x >> 8) 97 | buf[3] = byte(x) 98 | return string(buf[:]) 99 | } 100 | 101 | func fileList(list ...uint32) string { 102 | var buf []byte 103 | 104 | last := ^uint32(0) 105 | for _, x := range list { 106 | delta := x - last 107 | for delta >= 0x80 { 108 | buf = append(buf, byte(delta)|0x80) 109 | delta >>= 7 110 | } 111 | buf = append(buf, byte(delta)) 112 | last = x 113 | } 114 | buf = append(buf, 0) 115 | return string(buf) 116 | } 117 | 118 | func buildFlushIndex(out string, paths []string, doFlush bool, fileData map[string]string) { 119 | ix := Create(out) 120 | ix.AddPaths(paths) 121 | var files []string 122 | for name := range fileData { 123 | files = append(files, name) 124 | } 125 | sort.Strings(files) 126 | for _, name := range files { 127 | ix.Add(name, strings.NewReader(fileData[name])) 128 | } 129 | if doFlush { 130 | ix.flushPost() 131 | } 132 | ix.Flush() 133 | } 134 | 135 | func buildIndex(name string, paths []string, fileData map[string]string) { 136 | buildFlushIndex(name, paths, false, fileData) 137 | } 138 | 139 | func testTrivialWrite(t *testing.T, doFlush bool) { 140 | f, _ := ioutil.TempFile("", "index-test") 141 | defer os.Remove(f.Name()) 142 | out := f.Name() 143 | buildFlushIndex(out, nil, doFlush, trivialFiles) 144 | 145 | data, err := ioutil.ReadFile(out) 146 
| if err != nil { 147 | t.Fatalf("reading _test/index.triv: %v", err) 148 | } 149 | want := []byte(trivialIndex) 150 | if !bytes.Equal(data, want) { 151 | i := 0 152 | for i < len(data) && i < len(want) && data[i] == want[i] { 153 | i++ 154 | } 155 | t.Fatalf("wrong index:\nhave: %q %q\nwant: %q %q", data[:i], data[i:], want[:i], want[i:]) 156 | } 157 | } 158 | 159 | func TestTrivialWrite(t *testing.T) { 160 | testTrivialWrite(t, false) 161 | } 162 | 163 | func TestTrivialWriteDisk(t *testing.T) { 164 | testTrivialWrite(t, true) 165 | } 166 | 167 | func TestHeap(t *testing.T) { 168 | h := &postHeap{} 169 | es := []postEntry{7, 4, 3, 2, 4} 170 | for _, e := range es { 171 | h.addMem([]postEntry{e}) 172 | } 173 | if len(h.ch) != len(es) { 174 | t.Fatalf("wrong heap size: %d, want %d", len(h.ch), len(es)) 175 | } 176 | for a, b := h.next(), h.next(); b.trigram() != (1<<24 - 1); a, b = b, h.next() { 177 | if a > b { 178 | t.Fatalf("%d should <= %d", a, b) 179 | } 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /codesearch/lib/README.template: -------------------------------------------------------------------------------- 1 | These are the command-line Code Search tools from 2 | https://code.google.com/p/codesearch. 3 | 4 | These binaries are for ARCH systems running OPERSYS. 5 | 6 | To get started, run cindex with a list of directories to index: 7 | 8 | cindex /usr/include $HOME/src 9 | 10 | Then run csearch to run grep over all the indexed sources: 11 | 12 | csearch DATAKIT 13 | 14 | For details, run either command with the -help option, and 15 | read http://swtch.com/~rsc/regexp/regexp4.html. 16 | -------------------------------------------------------------------------------- /codesearch/lib/buildall: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script builds the code search binaries for a variety of OS/architecture combinations. 4 | 5 | . 
./setup 6 | 7 | for i in {5,6,8}{c,g,a,l} 8 | do 9 | go tool dist install cmd/$i 10 | done 11 | 12 | build() { 13 | echo "# $1" 14 | goos=$(echo $1 | sed 's;/.*;;') 15 | goarch=$(echo $1 | sed 's;.*/;;') 16 | GOOS=$goos GOARCH=$goarch CGO_ENABLED=0 \ 17 | go install -a code.google.com/p/codesearch/cmd/{cgrep,cindex,csearch} 18 | rm -rf codesearch-$version 19 | mkdir codesearch-$version 20 | mv ~/g/bin/{cgrep,cindex,csearch}* codesearch-$version 21 | chmod +x codesearch-$version/* 22 | cat README.template | sed "s/ARCH/$(arch $goarch)/; s/OPERSYS/$(os $goos)/" >codesearch-$version/README.txt 23 | rm -f codesearch-$version-$goos-$goarch.zip 24 | zip -z -r codesearch-$version-$goos-$goarch.zip codesearch-$version < codesearch-$version/README.txt 25 | rm -rf codesearch-0.01 26 | } 27 | 28 | for i in {linux,darwin,freebsd,windows}/{amd64,386} 29 | do 30 | build $i 31 | done 32 | -------------------------------------------------------------------------------- /codesearch/lib/setup: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | os() { 4 | case "$1" in 5 | freebsd) echo FreeBSD;; 6 | linux) echo Linux;; 7 | darwin) echo Mac OS X;; 8 | openbsd) echo OpenBSD;; 9 | netbsd) echo NetBSD;; 10 | windows) echo Windows;; 11 | *) echo $1;; 12 | esac 13 | } 14 | 15 | arch() { 16 | case "$1" in 17 | 386) echo 32-bit x86;; 18 | amd64) echo 64-bit x86;; 19 | *) echo $1;; 20 | esac 21 | } 22 | 23 | version=$(cat version) 24 | -------------------------------------------------------------------------------- /codesearch/lib/uploadall: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # gcodeup is a copy of $GOROOT/misc/dashboard/googlecode_upload.py. 4 | 5 | . ./setup 6 | user=$(sed -n 's/^re2.username = //' ~/.hgrc) 7 | password=$(sed -n 's/^re2\.password = //' ~/.hgrc) 8 | 9 | upload() { 10 | goos=$(echo $1 | sed "s/codesearch-$version-//; s/-.*//") 11 | goarch=$(echo $1 | sed "s/codesearch-$version-//; s/[a-z0-9]*-//; s/-.*//") 12 | gcodeup -s "binaries for $(os $goos) $(arch $goarch)" -p codesearch -u "$user" -w "$password" codesearch-$version-$1-$2.zip 13 | } 14 | 15 | for i in codesearch-$version-* 16 | do 17 | upload $i 18 | done 19 | -------------------------------------------------------------------------------- /codesearch/lib/version: -------------------------------------------------------------------------------- 1 | 0.01 2 | -------------------------------------------------------------------------------- /codesearch/regexp/copy.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | // Copied from Go's regexp/syntax. 6 | // Formatters edited to handle instByteRange. 7 | 8 | package regexp 9 | 10 | import ( 11 | "bytes" 12 | "fmt" 13 | "regexp/syntax" 14 | "sort" 15 | "strconv" 16 | "unicode" 17 | ) 18 | 19 | // cleanClass sorts the ranges (pairs of elements of r), 20 | // merges them, and eliminates duplicates. 21 | func cleanClass(rp *[]rune) []rune { 22 | 23 | // Sort by lo increasing, hi decreasing to break ties. 24 | sort.Sort(ranges{rp}) 25 | 26 | r := *rp 27 | if len(r) < 2 { 28 | return r 29 | } 30 | 31 | // Merge abutting, overlapping. 
32 | w := 2 // write index 33 | for i := 2; i < len(r); i += 2 { 34 | lo, hi := r[i], r[i+1] 35 | if lo <= r[w-1]+1 { 36 | // merge with previous range 37 | if hi > r[w-1] { 38 | r[w-1] = hi 39 | } 40 | continue 41 | } 42 | // new disjoint range 43 | r[w] = lo 44 | r[w+1] = hi 45 | w += 2 46 | } 47 | 48 | return r[:w] 49 | } 50 | 51 | // appendRange returns the result of appending the range lo-hi to the class r. 52 | func appendRange(r []rune, lo, hi rune) []rune { 53 | // Expand last range or next to last range if it overlaps or abuts. 54 | // Checking two ranges helps when appending case-folded 55 | // alphabets, so that one range can be expanding A-Z and the 56 | // other expanding a-z. 57 | n := len(r) 58 | for i := 2; i <= 4; i += 2 { // twice, using i=2, i=4 59 | if n >= i { 60 | rlo, rhi := r[n-i], r[n-i+1] 61 | if lo <= rhi+1 && rlo <= hi+1 { 62 | if lo < rlo { 63 | r[n-i] = lo 64 | } 65 | if hi > rhi { 66 | r[n-i+1] = hi 67 | } 68 | return r 69 | } 70 | } 71 | } 72 | 73 | return append(r, lo, hi) 74 | } 75 | 76 | const ( 77 | // minimum and maximum runes involved in folding. 78 | // checked during test. 79 | minFold = 0x0041 80 | maxFold = 0x1044f 81 | ) 82 | 83 | // appendFoldedRange returns the result of appending the range lo-hi 84 | // and its case folding-equivalent runes to the class r. 85 | func appendFoldedRange(r []rune, lo, hi rune) []rune { 86 | // Optimizations. 87 | if lo <= minFold && hi >= maxFold { 88 | // Range is full: folding can't add more. 89 | return appendRange(r, lo, hi) 90 | } 91 | if hi < minFold || lo > maxFold { 92 | // Range is outside folding possibilities. 93 | return appendRange(r, lo, hi) 94 | } 95 | if lo < minFold { 96 | // [lo, minFold-1] needs no folding. 97 | r = appendRange(r, lo, minFold-1) 98 | lo = minFold 99 | } 100 | if hi > maxFold { 101 | // [maxFold+1, hi] needs no folding. 102 | r = appendRange(r, maxFold+1, hi) 103 | hi = maxFold 104 | } 105 | 106 | // Brute force. Depend on appendRange to coalesce ranges on the fly. 107 | for c := lo; c <= hi; c++ { 108 | r = appendRange(r, c, c) 109 | f := unicode.SimpleFold(c) 110 | for f != c { 111 | r = appendRange(r, f, f) 112 | f = unicode.SimpleFold(f) 113 | } 114 | } 115 | return r 116 | } 117 | 118 | // ranges implements sort.Interface on a []rune. 119 | // The choice of receiver type definition is strange 120 | // but avoids an allocation since we already have 121 | // a *[]rune. 
122 | type ranges struct { 123 | p *[]rune 124 | } 125 | 126 | func (ra ranges) Less(i, j int) bool { 127 | p := *ra.p 128 | i *= 2 129 | j *= 2 130 | return p[i] < p[j] || p[i] == p[j] && p[i+1] > p[j+1] 131 | } 132 | 133 | func (ra ranges) Len() int { 134 | return len(*ra.p) / 2 135 | } 136 | 137 | func (ra ranges) Swap(i, j int) { 138 | p := *ra.p 139 | i *= 2 140 | j *= 2 141 | p[i], p[i+1], p[j], p[j+1] = p[j], p[j+1], p[i], p[i+1] 142 | } 143 | 144 | func progString(p *syntax.Prog) string { 145 | var b bytes.Buffer 146 | dumpProg(&b, p) 147 | return b.String() 148 | } 149 | 150 | func instString(i *syntax.Inst) string { 151 | var b bytes.Buffer 152 | dumpInst(&b, i) 153 | return b.String() 154 | } 155 | 156 | func bw(b *bytes.Buffer, args ...string) { 157 | for _, s := range args { 158 | b.WriteString(s) 159 | } 160 | } 161 | 162 | func dumpProg(b *bytes.Buffer, p *syntax.Prog) { 163 | for j := range p.Inst { 164 | i := &p.Inst[j] 165 | pc := strconv.Itoa(j) 166 | if len(pc) < 3 { 167 | b.WriteString(" "[len(pc):]) 168 | } 169 | if j == p.Start { 170 | pc += "*" 171 | } 172 | bw(b, pc, "\t") 173 | dumpInst(b, i) 174 | bw(b, "\n") 175 | } 176 | } 177 | 178 | func u32(i uint32) string { 179 | return strconv.FormatUint(uint64(i), 10) 180 | } 181 | 182 | func dumpInst(b *bytes.Buffer, i *syntax.Inst) { 183 | switch i.Op { 184 | case syntax.InstAlt: 185 | bw(b, "alt -> ", u32(i.Out), ", ", u32(i.Arg)) 186 | case syntax.InstAltMatch: 187 | bw(b, "altmatch -> ", u32(i.Out), ", ", u32(i.Arg)) 188 | case syntax.InstCapture: 189 | bw(b, "cap ", u32(i.Arg), " -> ", u32(i.Out)) 190 | case syntax.InstEmptyWidth: 191 | bw(b, "empty ", u32(i.Arg), " -> ", u32(i.Out)) 192 | case syntax.InstMatch: 193 | bw(b, "match") 194 | case syntax.InstFail: 195 | bw(b, "fail") 196 | case syntax.InstNop: 197 | bw(b, "nop -> ", u32(i.Out)) 198 | case instByteRange: 199 | fmt.Fprintf(b, "byte %02x-%02x", (i.Arg>>8)&0xFF, i.Arg&0xFF) 200 | if i.Arg&argFold != 0 { 201 | bw(b, "/i") 202 | } 203 | bw(b, " -> ", u32(i.Out)) 204 | 205 | // Should not happen 206 | case syntax.InstRune: 207 | if i.Rune == nil { 208 | // shouldn't happen 209 | bw(b, "rune ") 210 | } 211 | bw(b, "rune ", strconv.QuoteToASCII(string(i.Rune))) 212 | if syntax.Flags(i.Arg)&syntax.FoldCase != 0 { 213 | bw(b, "/i") 214 | } 215 | bw(b, " -> ", u32(i.Out)) 216 | case syntax.InstRune1: 217 | bw(b, "rune1 ", strconv.QuoteToASCII(string(i.Rune)), " -> ", u32(i.Out)) 218 | case syntax.InstRuneAny: 219 | bw(b, "any -> ", u32(i.Out)) 220 | case syntax.InstRuneAnyNotNL: 221 | bw(b, "anynotnl -> ", u32(i.Out)) 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /codesearch/regexp/regexp.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | // Package regexp implements regular expression search tuned for 6 | // use in grep-like programs. 7 | package regexp 8 | 9 | import "regexp/syntax" 10 | 11 | func bug() { 12 | panic("codesearch/regexp: internal error") 13 | } 14 | 15 | // Regexp is the representation of a compiled regular expression. 16 | // A Regexp is NOT SAFE for concurrent use by multiple goroutines. 
17 | type Regexp struct { 18 | Syntax *syntax.Regexp 19 | expr string // original expression 20 | m matcher 21 | } 22 | 23 | // String returns the source text used to compile the regular expression. 24 | func (re *Regexp) String() string { 25 | return re.expr 26 | } 27 | 28 | // Compile parses a regular expression and returns, if successful, 29 | // a Regexp object that can be used to match against lines of text. 30 | func Compile(expr string) (*Regexp, error) { 31 | re, err := syntax.Parse(expr, syntax.Perl) 32 | if err != nil { 33 | return nil, err 34 | } 35 | sre := re.Simplify() 36 | prog, err := syntax.Compile(sre) 37 | if err != nil { 38 | return nil, err 39 | } 40 | if err := toByteProg(prog); err != nil { 41 | return nil, err 42 | } 43 | r := &Regexp{ 44 | Syntax: re, 45 | expr: expr, 46 | } 47 | if err := r.m.init(prog); err != nil { 48 | return nil, err 49 | } 50 | return r, nil 51 | } 52 | 53 | func (r *Regexp) Match(b []byte, beginText, endText bool) (end int) { 54 | return r.m.match(b, beginText, endText) 55 | } 56 | 57 | func (r *Regexp) MatchString(s string, beginText, endText bool) (end int) { 58 | return r.m.matchString(s, beginText, endText) 59 | } 60 | -------------------------------------------------------------------------------- /codesearch/regexp/regexp_test.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package regexp 6 | 7 | import ( 8 | "bytes" 9 | "reflect" 10 | "strings" 11 | "testing" 12 | ) 13 | 14 | var nstateTests = []struct { 15 | q []uint32 16 | partial rune 17 | }{ 18 | {[]uint32{1, 2, 3}, 1}, 19 | {[]uint32{1}, 1}, 20 | {[]uint32{}, 0}, 21 | {[]uint32{1, 2, 8}, 0x10FFF}, 22 | } 23 | 24 | func TestNstateEnc(t *testing.T) { 25 | var n1, n2 nstate 26 | n1.q.Init(10) 27 | n2.q.Init(10) 28 | for _, tt := range nstateTests { 29 | n1.q.Reset() 30 | n1.partial = tt.partial 31 | for _, id := range tt.q { 32 | n1.q.Add(id) 33 | } 34 | enc := n1.enc() 35 | n2.dec(enc) 36 | if n2.partial != n1.partial || !reflect.DeepEqual(n1.q.Dense(), n2.q.Dense()) { 37 | t.Errorf("%v.enc.dec = %v", &n1, &n2) 38 | } 39 | } 40 | } 41 | 42 | var matchTests = []struct { 43 | re string 44 | s string 45 | m []int 46 | }{ 47 | // Adapted from go/src/pkg/regexp/find_test.go. 
48 | {`a+`, "abc\ndef\nghi\n", []int{1}}, 49 | {``, ``, []int{1}}, 50 | {`^abcdefg`, "abcdefg", []int{1}}, 51 | {`a+`, "baaab", []int{1}}, 52 | {"abcd..", "abcdef", []int{1}}, 53 | {`a`, "a", []int{1}}, 54 | {`x`, "y", nil}, 55 | {`b`, "abc", []int{1}}, 56 | {`.`, "a", []int{1}}, 57 | {`.*`, "abcdef", []int{1}}, 58 | {`^`, "abcde", []int{1}}, 59 | {`$`, "abcde", []int{1}}, 60 | {`^abcd$`, "abcd", []int{1}}, 61 | {`^bcd'`, "abcdef", nil}, 62 | {`^abcd$`, "abcde", nil}, 63 | {`a+`, "baaab", []int{1}}, 64 | {`a*`, "baaab", []int{1}}, 65 | {`[a-z]+`, "abcd", []int{1}}, 66 | {`[^a-z]+`, "ab1234cd", []int{1}}, 67 | {`[a\-\]z]+`, "az]-bcz", []int{1}}, 68 | {`[^\n]+`, "abcd\n", []int{1}}, 69 | {`[日本語]+`, "日本語日本語", []int{1}}, 70 | {`日本語+`, "日本語", []int{1}}, 71 | {`日本語+`, "日本語語語語", []int{1}}, 72 | {`()`, "", []int{1}}, 73 | {`(a)`, "a", []int{1}}, 74 | {`(.)(.)`, "日a", []int{1}}, 75 | {`(.*)`, "", []int{1}}, 76 | {`(.*)`, "abcd", []int{1}}, 77 | {`(..)(..)`, "abcd", []int{1}}, 78 | {`(([^xyz]*)(d))`, "abcd", []int{1}}, 79 | {`((a|b|c)*(d))`, "abcd", []int{1}}, 80 | {`(((a|b|c)*)(d))`, "abcd", []int{1}}, 81 | {`\a\f\r\t\v`, "\a\f\r\t\v", []int{1}}, 82 | {`[\a\f\n\r\t\v]+`, "\a\f\r\t\v", []int{1}}, 83 | 84 | {`a*(|(b))c*`, "aacc", []int{1}}, 85 | {`(.*).*`, "ab", []int{1}}, 86 | {`[.]`, ".", []int{1}}, 87 | {`/$`, "/abc/", []int{1}}, 88 | {`/$`, "/abc", nil}, 89 | 90 | // multiple matches 91 | {`.`, "abc", []int{1}}, 92 | {`(.)`, "abc", []int{1}}, 93 | {`.(.)`, "abcd", []int{1}}, 94 | {`ab*`, "abbaab", []int{1}}, 95 | {`a(b*)`, "abbaab", []int{1}}, 96 | 97 | // fixed bugs 98 | {`ab$`, "cab", []int{1}}, 99 | {`axxb$`, "axxcb", nil}, 100 | {`data`, "daXY data", []int{1}}, 101 | {`da(.)a$`, "daXY data", []int{1}}, 102 | {`zx+`, "zzx", []int{1}}, 103 | {`ab$`, "abcab", []int{1}}, 104 | {`(aa)*$`, "a", []int{1}}, 105 | {`(?:.|(?:.a))`, "", nil}, 106 | {`(?:A(?:A|a))`, "Aa", []int{1}}, 107 | {`(?:A|(?:A|a))`, "a", []int{1}}, 108 | {`(a){0}`, "", []int{1}}, 109 | // {`(?-s)(?:(?:^).)`, "\n", nil}, 110 | // {`(?s)(?:(?:^).)`, "\n", []int{1}}, 111 | // {`(?:(?:^).)`, "\n", nil}, 112 | {`\b`, "x", []int{1}}, 113 | {`\b`, "xx", []int{1}}, 114 | {`\b`, "x y", []int{1}}, 115 | {`\b`, "xx yy", []int{1}}, 116 | {`\B`, "x", nil}, 117 | {`\B`, "xx", []int{1}}, 118 | {`\B`, "x y", nil}, 119 | {`\B`, "xx yy", []int{1}}, 120 | {`(?im)^[abc]+$`, "abcABC", []int{1}}, 121 | {`(?im)^[α]+$`, "αΑ", []int{1}}, 122 | {`[Aa]BC`, "abc", nil}, 123 | {`[Aa]bc`, "abc", []int{1}}, 124 | 125 | // RE2 tests 126 | {`[^\S\s]`, "abcd", nil}, 127 | {`[^\S[:space:]]`, "abcd", nil}, 128 | {`[^\D\d]`, "abcd", nil}, 129 | {`[^\D[:digit:]]`, "abcd", nil}, 130 | {`(?i)\W`, "x", nil}, 131 | {`(?i)\W`, "k", nil}, 132 | {`(?i)\W`, "s", nil}, 133 | 134 | // can backslash-escape any punctuation 135 | {`\!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\{\|\}\~`, 136 | `!"#$%&'()*+,-./:;<=>?@[\]^_{|}~`, []int{1}}, 137 | {`[\!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\{\|\}\~]+`, 138 | `!"#$%&'()*+,-./:;<=>?@[\]^_{|}~`, []int{1}}, 139 | {"\\`", "`", []int{1}}, 140 | {"[\\`]+", "`", []int{1}}, 141 | 142 | // long set of matches (longer than startSize) 143 | { 144 | ".", 145 | "qwertyuiopasdfghjklzxcvbnm1234567890", 146 | []int{1}, 147 | }, 148 | } 149 | 150 | func TestMatch(t *testing.T) { 151 | for _, tt := range matchTests { 152 | re, err := Compile("(?m)" + tt.re) 153 | if err != nil { 154 | t.Errorf("Compile(%#q): %v", tt.re, err) 155 | continue 156 | } 157 | b := []byte(tt.s) 158 | lines := grep(re, b) 159 | if 
!reflect.DeepEqual(lines, tt.m) { 160 | t.Errorf("grep(%#q, %q) = %v, want %v", tt.re, tt.s, lines, tt.m) 161 | } 162 | } 163 | } 164 | 165 | func grep(re *Regexp, b []byte) []int { 166 | var m []int 167 | lineno := 1 168 | for { 169 | i := re.Match(b, true, true) 170 | if i < 0 { 171 | break 172 | } 173 | start := bytes.LastIndex(b[:i], nl) + 1 174 | end := i + 1 175 | if end > len(b) { 176 | end = len(b) 177 | } 178 | lineno += bytes.Count(b[:start], nl) 179 | m = append(m, lineno) 180 | if start < end && b[end-1] == '\n' { 181 | lineno++ 182 | } 183 | b = b[end:] 184 | if len(b) == 0 { 185 | break 186 | } 187 | } 188 | return m 189 | } 190 | 191 | var grepTests = []struct { 192 | re string 193 | s string 194 | out string 195 | err string 196 | g Grep 197 | }{ 198 | {re: `a+`, s: "abc\ndef\nghalloo\n", out: "input:abc\ninput:ghalloo\n"}, 199 | {re: `x.*y`, s: "xay\nxa\ny\n", out: "input:xay\n"}, 200 | } 201 | 202 | func TestGrep(t *testing.T) { 203 | for i, tt := range grepTests { 204 | re, err := Compile("(?m)" + tt.re) 205 | if err != nil { 206 | t.Errorf("Compile(%#q): %v", tt.re, err) 207 | continue 208 | } 209 | g := tt.g 210 | g.Regexp = re 211 | var out, errb bytes.Buffer 212 | g.Stdout = &out 213 | g.Stderr = &errb 214 | g.Reader(strings.NewReader(tt.s), "input") 215 | if out.String() != tt.out || errb.String() != tt.err { 216 | t.Errorf("#%d: grep(%#q, %q) = %q, %q, want %q, %q", i, tt.re, tt.s, out.String(), errb.String(), tt.out, tt.err) 217 | } 218 | } 219 | } 220 | -------------------------------------------------------------------------------- /codesearch/regexp/utf.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | package regexp 6 | 7 | import ( 8 | "regexp/syntax" 9 | "unicode" 10 | "unicode/utf8" 11 | ) 12 | 13 | const ( 14 | instFail = syntax.InstFail 15 | instAlt = syntax.InstAlt 16 | instByteRange = syntax.InstRune | 0x80 // local opcode 17 | 18 | argFold = 1 << 16 19 | ) 20 | 21 | func toByteProg(prog *syntax.Prog) error { 22 | var b runeBuilder 23 | for pc := range prog.Inst { 24 | i := &prog.Inst[pc] 25 | switch i.Op { 26 | case syntax.InstRune, syntax.InstRune1: 27 | // General rune range. PIA. 28 | // TODO: Pick off single-byte case. 29 | if lo, hi, fold, ok := oneByteRange(i); ok { 30 | i.Op = instByteRange 31 | i.Arg = uint32(lo)<<8 | uint32(hi) 32 | if fold { 33 | i.Arg |= argFold 34 | } 35 | break 36 | } 37 | 38 | r := i.Rune 39 | if syntax.Flags(i.Arg)&syntax.FoldCase != 0 { 40 | // Build folded list. 41 | var rr []rune 42 | if len(r) == 1 { 43 | rr = appendFoldedRange(rr, r[0], r[0]) 44 | } else { 45 | for j := 0; j < len(r); j += 2 { 46 | rr = appendFoldedRange(rr, r[j], r[j+1]) 47 | } 48 | } 49 | r = rr 50 | } 51 | 52 | b.init(prog, uint32(pc), i.Out) 53 | if len(r) == 1 { 54 | b.addRange(r[0], r[0], false) 55 | } else { 56 | for j := 0; j < len(r); j += 2 { 57 | b.addRange(r[j], r[j+1], false) 58 | } 59 | } 60 | 61 | case syntax.InstRuneAny, syntax.InstRuneAnyNotNL: 62 | // All runes. 63 | // AnyNotNL should exclude \n but the line-at-a-time 64 | // execution takes care of that for us. 
65 | b.init(prog, uint32(pc), i.Out) 66 | b.addRange(0, unicode.MaxRune, false) 67 | } 68 | } 69 | return nil 70 | } 71 | 72 | func oneByteRange(i *syntax.Inst) (lo, hi byte, fold, ok bool) { 73 | if i.Op == syntax.InstRune1 { 74 | r := i.Rune[0] 75 | if r < utf8.RuneSelf { 76 | return byte(r), byte(r), false, true 77 | } 78 | } 79 | if i.Op != syntax.InstRune { 80 | return 81 | } 82 | fold = syntax.Flags(i.Arg)&syntax.FoldCase != 0 83 | if len(i.Rune) == 1 || len(i.Rune) == 2 && i.Rune[0] == i.Rune[1] { 84 | r := i.Rune[0] 85 | if r >= utf8.RuneSelf { 86 | return 87 | } 88 | if fold && !asciiFold(r) { 89 | return 90 | } 91 | return byte(r), byte(r), fold, true 92 | } 93 | if len(i.Rune) == 2 && i.Rune[1] < utf8.RuneSelf { 94 | if fold { 95 | for r := i.Rune[0]; r <= i.Rune[1]; r++ { 96 | if asciiFold(r) { 97 | return 98 | } 99 | } 100 | } 101 | return byte(i.Rune[0]), byte(i.Rune[1]), fold, true 102 | } 103 | if len(i.Rune) == 4 && i.Rune[0] == i.Rune[1] && i.Rune[2] == i.Rune[3] && unicode.SimpleFold(i.Rune[0]) == i.Rune[2] && unicode.SimpleFold(i.Rune[2]) == i.Rune[0] { 104 | return byte(i.Rune[0]), byte(i.Rune[0]), true, true 105 | } 106 | 107 | return 108 | } 109 | 110 | func asciiFold(r rune) bool { 111 | if r >= utf8.RuneSelf { 112 | return false 113 | } 114 | r1 := unicode.SimpleFold(r) 115 | if r1 >= utf8.RuneSelf { 116 | return false 117 | } 118 | if r1 == r { 119 | return true 120 | } 121 | return unicode.SimpleFold(r1) == r 122 | } 123 | 124 | func maxRune(n int) rune { 125 | b := 0 126 | if n == 1 { 127 | b = 7 128 | } else { 129 | b = 8 - (n + 1) + 6*(n-1) 130 | } 131 | return 1< 0xbf { 178 | // Not a continuation byte, no need to cache. 179 | return b.uncachedSuffix(lo, hi, fold, next) 180 | } 181 | 182 | key := cacheKey{lo, hi, fold, next} 183 | if pc, ok := b.cache[key]; ok { 184 | return pc 185 | } 186 | 187 | pc := b.uncachedSuffix(lo, hi, fold, next) 188 | b.cache[key] = pc 189 | return pc 190 | } 191 | 192 | func (b *runeBuilder) addBranch(pc uint32) { 193 | // Add pc to the branch at the beginning. 194 | i := &b.p.Inst[b.begin] 195 | switch i.Op { 196 | case syntax.InstFail: 197 | i.Op = syntax.InstNop 198 | i.Out = pc 199 | return 200 | case syntax.InstNop: 201 | i.Op = syntax.InstAlt 202 | i.Arg = pc 203 | return 204 | case syntax.InstAlt: 205 | apc := uint32(len(b.p.Inst)) 206 | b.p.Inst = append(b.p.Inst, syntax.Inst{Op: instAlt, Out: i.Arg, Arg: pc}) 207 | i = &b.p.Inst[b.begin] 208 | i.Arg = apc 209 | b.begin = apc 210 | } 211 | } 212 | 213 | func (b *runeBuilder) addRange(lo, hi rune, fold bool) { 214 | if lo > hi { 215 | return 216 | } 217 | 218 | // TODO: Pick off 80-10FFFF for special handling? 219 | if lo == 0x80 && hi == 0x10FFFF { 220 | } 221 | 222 | // Split range into same-length sized ranges. 223 | for i := 1; i < utf8.UTFMax; i++ { 224 | max := maxRune(i) 225 | if lo <= max && max < hi { 226 | b.addRange(lo, max, fold) 227 | b.addRange(max+1, hi, fold) 228 | return 229 | } 230 | } 231 | 232 | // ASCII range is special. 233 | if hi < utf8.RuneSelf { 234 | b.addBranch(b.suffix(byte(lo), byte(hi), fold, 0)) 235 | return 236 | } 237 | 238 | // Split range into sections that agree on leading bytes. 
239 | for i := 1; i < utf8.UTFMax; i++ { 240 | m := rune(1)<= 0; i-- { 265 | pc = b.suffix(ulo[i], uhi[i], false, pc) 266 | } 267 | b.addBranch(pc) 268 | } 269 | -------------------------------------------------------------------------------- /codesearch/sparse/set.go: -------------------------------------------------------------------------------- 1 | // Copyright 2011 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | 5 | // Package sparse implements sparse sets. 6 | package sparse 7 | 8 | // For comparison: running cindex over the Linux 2.6 kernel with this 9 | // implementation of trigram sets takes 11 seconds. If I change it to 10 | // a bitmap (which must be cleared between files) it takes 25 seconds. 11 | 12 | // A Set is a sparse set of uint32 values. 13 | // http://research.swtch.com/2008/03/using-uninitialized-memory-for-fun-and.html 14 | type Set struct { 15 | dense []uint32 16 | sparse []uint32 17 | } 18 | 19 | // NewSet returns a new Set with a given maximum size. 20 | // The set can contain numbers in [0, max-1]. 21 | func NewSet(max uint32) *Set { 22 | return &Set{ 23 | sparse: make([]uint32, max), 24 | } 25 | } 26 | 27 | // Init initializes a Set to have a given maximum size. 28 | // The set can contain numbers in [0, max-1]. 29 | func (s *Set) Init(max uint32) { 30 | s.sparse = make([]uint32, max) 31 | } 32 | 33 | // Reset clears (empties) the set. 34 | func (s *Set) Reset() { 35 | s.dense = s.dense[:0] 36 | } 37 | 38 | // Add adds x to the set if it is not already there. 39 | func (s *Set) Add(x uint32) { 40 | v := s.sparse[x] 41 | if v < uint32(len(s.dense)) && s.dense[v] == x { 42 | return 43 | } 44 | n := len(s.dense) 45 | s.sparse[x] = uint32(n) 46 | s.dense = append(s.dense, x) 47 | } 48 | 49 | // Has reports whether x is in the set. 50 | func (s *Set) Has(x uint32) bool { 51 | v := s.sparse[x] 52 | return v < uint32(len(s.dense)) && s.dense[v] == x 53 | } 54 | 55 | // Dense returns the values in the set. 56 | // The values are listed in the order in which they 57 | // were inserted. 58 | func (s *Set) Dense() []uint32 { 59 | return s.dense 60 | } 61 | 62 | // Len returns the number of values in the set. 63 | func (s *Set) Len() int { 64 | return len(s.dense) 65 | } 66 | -------------------------------------------------------------------------------- /concourse.md: -------------------------------------------------------------------------------- 1 | # Concourse CI 2 | 3 | * Overview and Docs: https://concourse-ci.org/ 4 | * Tutorial: https://concoursetutorial.com/ 5 | * Pipeline: [concourse.yml](concourse.yml) 6 | 7 | # Deployment 8 | 9 | ### Concourse CI 10 | 11 | First, download compose file: 12 | 13 | wget https://concourse-ci.org/docker-compose.yml 14 | 15 | Now edit that file to set password and url: 16 | 17 | CONCOURSE_EXTERNAL_URL: http://ci.example.com:8080 18 | CONCOURSE_ADD_LOCAL_USER: ADMIN_LOGIN:ADMIN_PASSWORD 19 | CONCOURSE_MAIN_TEAM_LOCAL_USER: ADMIN_LOGIN 20 | 21 | Start: 22 | 23 | docker-compose up -d 24 | 25 | Now open the url and find link to download *CLI tools*. 
26 | 27 | Install fly CLI: 28 | 29 | sudo mkdir -p /usr/local/bin 30 | sudo mv ~/Downloads/fly /usr/local/bin 31 | sudo chmod 0755 /usr/local/bin/fly 32 | 33 | 34 | Finally, create *Target*: 35 | 36 | fly --target hound login --concourse-url http://ci.example.com:8080 -u ADMIN_LOGIN -p ADMIN_PASSWORD 37 | 38 | ### GitHub Token 39 | 40 | Create a token with the following access rights: 41 | 42 | * ``write:packages`` 43 | * ``repo:status`` 44 | 45 | ### Parameters file 46 | 47 | Create a file ``concourse-params.yml`` with the following content: 48 | 49 | github-username: USERNAME 50 | github-token: TOKEN 51 | webhook-token: ANY-RANDOM-STRING 52 | 53 | * Use the TOKEN created for USERNAME 54 | 55 | ### Set Pipeline 56 | 57 | Execute the following command: 58 | 59 | fly --target=hound set-pipeline --pipeline=pull-requests --config=concourse.yml --load-vars-from=concourse-params.yml 60 | 61 | ### GitHub Webhook 62 | 63 | In the GitHub repository, configure a webhook pointing to the [Concourse CI API](https://concourse-ci.org/resources.html#resource-webhook-token): 64 | 65 | * **Payload URL**: *http://CI.EXAMPLE.COM:8080/api/v1/teams/main/pipelines/pull-requests/resources/pr/check/webhook?webhook_token=WEBHOOK_TOKEN* 66 | * **Content Type**: any value is ok 67 | * **Secret**: leave empty 68 | * **Which events would you like to trigger this webhook?**: *Let me select individual events.*: 69 | 70 | * **[v] Pull Requests** 71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /concourse.yml: -------------------------------------------------------------------------------- 1 | # see concourse.md for deployment instructions 2 | resource_types: 3 | - name: pull-request 4 | type: docker-image 5 | source: 6 | repository: teliaoss/github-pr-resource 7 | 8 | resources: 9 | - name: pr 10 | type: pull-request 11 | check_every: 24h 12 | webhook_token: ((webhook-token)) 13 | source: 14 | repository: itpp-labs/hound 15 | access_token: ((github-token)) 16 | 17 | - name: docker-registry 18 | type: docker-image 19 | source: 20 | repository: docker.pkg.github.com/itpp-labs/hound/pr 21 | username: ((github-username)) 22 | password: ((github-token)) 23 | 24 | jobs: 25 | - name: review-pull-request 26 | plan: 27 | - get: pr 28 | trigger: true 29 | version: latest 30 | - put: set-status-pending 31 | resource: pr 32 | params: 33 | path: pr 34 | status: pending 35 | 36 | - task: analyze-pr 37 | config: 38 | platform: linux 39 | image_resource: 40 | type: docker-image 41 | source: {repository: alpine, tag: "latest"} 42 | inputs: 43 | - name: pr 44 | outputs: 45 | - name: pr-info 46 | run: 47 | path: /bin/sh 48 | args: 49 | - -xce 50 | - | 51 | BASE_SHA=$(cat pr/.git/resource/base_sha | cut -c 1-7) 52 | HEAD_SHA=$(cat pr/.git/resource/head_sha | cut -c 1-7) 53 | PR_NUM=$(cat pr/.git/resource/pr) 54 | TAG="pr-$PR_NUM-head-$HEAD_SHA-base-$BASE_SHA" 55 | echo $TAG > pr-info/tag.txt 56 | echo "To test updates run: 57 | 58 | # normal 59 | docker run -p 6080:6080 --rm docker.pkg.github.com/itpp-labs/hound/pr:$TAG 60 | # dev 61 | docker run -p 6080:6080 -p 9000:9000 --rm docker.pkg.github.com/itpp-labs/hound/pr:$TAG --dev 62 | 63 | You may need to be [authenticated](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line): 64 | 65 | docker login docker.pkg.github.com -u GITHUB_USERNAME -p GITHUB_TOKEN 66 | 67 | > Posted from Concourse CI (see concourse.yml)" > pr-info/comment.txt 68 | 69 | on_failure: 70 | put: set-status-failure 71 | resource: 
pr 72 | params: 73 | path: pr 74 | status: failure 75 | 76 | - put: docker-registry 77 | params: 78 | build: pr 79 | dockerfile: pr/DockerfilePR 80 | tag_file: pr-info/tag.txt 81 | get_params: {skip_download: true} 82 | 83 | on_failure: 84 | put: set-status-failure 85 | resource: pr 86 | params: 87 | path: pr 88 | status: failure 89 | 90 | - put: set-status-success 91 | resource: pr 92 | params: 93 | path: pr 94 | status: success 95 | target_url: $ATC_EXTERNAL_URL/builds/$BUILD_ID 96 | # it's not neccesary to post comment if we print it as a log in the pipeline 97 | comment_file: pr-info/comment.txt 98 | -------------------------------------------------------------------------------- /config-example.json: -------------------------------------------------------------------------------- 1 | { 2 | "max-concurrent-indexers" : 2, 3 | "max-concurrent-searchers" : 2, 4 | "max-repos-in-first-result" : 10, 5 | "max-repos-in-next-result" : 30, 6 | "dbpath" : "data", 7 | "title" : "Hound", 8 | "health-check-uri" : "/healthz", 9 | "init-search": { 10 | "excludeFiles": "ui/bindata.go" 11 | }, 12 | "ads": [ 13 | "Try it here: search.odooism.com" 14 | ], 15 | "repos": [ 16 | { 17 | "name" : "SomeGitRepo", 18 | "url" : "https://www.github.com/YourOrganization/RepoOne.git" 19 | }, 20 | { 21 | "name" : "AnotherGitRepo", 22 | "url" : "https://www.github.com/YourOrganization/RepoOne.git", 23 | "ms-between-poll": 10000, 24 | "exclude-dot-files": true, 25 | "vcs-config": { 26 | "ref": "master" 27 | } 28 | }, 29 | { 30 | "name" : "SomeMercurialRepo", 31 | "url" : "https://www.example.com/foo/hg", 32 | "vcs" : "hg" 33 | }, 34 | { 35 | "name" : "Subversion", 36 | "url" : "http://my-svn.com/repo", 37 | "url-pattern" : { 38 | "base-url" : "{url}/{path}{anchor}" 39 | }, 40 | "vcs" : "svn" 41 | }, 42 | { 43 | "name" : "SubversionWithCreds", 44 | "url" : "http://my-private-svn.com/repo", 45 | "url-pattern" : { 46 | "base-url" : "{url}/{path}{anchor}" 47 | }, 48 | "vcs" : "svn", 49 | "vcs-config" : { 50 | "username" : "username_for_ro_account", 51 | "password" : "password_for_ro_account" 52 | } 53 | }, 54 | { 55 | "name" : "LocalFolder", 56 | "url" : "file:///absolute/path/to/directory" 57 | }, 58 | { 59 | "name" : "RepoWithCustomUrls", 60 | "url" : "https://github.com/username/Foo.git", 61 | "url-pattern" : { 62 | "base-url" : "{url}/files/{path}{anchor}", 63 | "anchor" : "#line={line}" 64 | } 65 | }, 66 | { 67 | "name" : "BitbucketCustomUrl", 68 | "url" : "git@bitbucket.org:organization/project.git", 69 | "url-pattern" : { 70 | "base-url" : "https://{url}/src/master/{path}{anchor}", 71 | "anchor" : "#{filename}-{line}" 72 | } 73 | }, 74 | { 75 | "name" : "RepoWithPollingDisabled", 76 | "url" : "https://www.github.com/YourOrganization/RepoOne.git", 77 | "enable-poll-updates" : false 78 | }, 79 | { 80 | "name" : "RepoWithPushingEnabled", 81 | "url" : "https://www.github.com/YourOrganization/RepoOne.git", 82 | "enable-push-updates" : true 83 | }, 84 | { 85 | "name" : "RepoIsGitHubWiki", 86 | "url" : "https://github.com/YourOrganization/RepoWithWiki.wiki.git", 87 | "url-pattern" : { 88 | "base-url" : "{url}/{path}" 89 | } 90 | } 91 | ] 92 | } 93 | -------------------------------------------------------------------------------- /config/config.go: -------------------------------------------------------------------------------- 1 | package config 2 | 3 | import ( 4 | "encoding/base64" 5 | "encoding/json" 6 | "errors" 7 | "net/url" 8 | "os" 9 | "path/filepath" 10 | "time" 11 | ) 12 | 13 | const ( 14 | defaultMsBetweenPoll = 
30000 15 | defaultMaxConcurrentIndexers = 2 16 | defaultMaxConcurrentSearchers = 1000 17 | defaultMaxReposInFirstResult = 10 18 | defaultMaxReposInNextResult = 30 19 | defaultPushEnabled = false 20 | defaultPollEnabled = true 21 | defaultTitle = "Hound" 22 | defaultVcs = "git" 23 | defaultBaseUrl = "{url}/blob/{rev}/{path}{anchor}" 24 | defaultAnchor = "#L{line}" 25 | defaultHealthCheckURI = "/healthz" 26 | ) 27 | 28 | type UrlPattern struct { 29 | BaseUrl string `json:"base-url"` 30 | Anchor string `json:"anchor"` 31 | } 32 | 33 | type PatternLink struct { 34 | Pattern string `json:"pattern"` 35 | Link string `json:"link"` 36 | } 37 | 38 | type Repo struct { 39 | Name string `json:"name"` 40 | Url string `json:"url"` 41 | MsBetweenPolls int `json:"ms-between-poll"` 42 | Vcs string `json:"vcs"` 43 | VcsConfigMessage *SecretMessage `json:"vcs-config"` 44 | UrlPattern *UrlPattern `json:"url-pattern"` 45 | ExcludeDotFiles bool `json:"exclude-dot-files"` 46 | EnablePollUpdates *bool `json:"enable-poll-updates"` 47 | EnablePushUpdates *bool `json:"enable-push-updates"` 48 | PatternLinks []PatternLink `json:"pattern-links"` 49 | } 50 | 51 | // Used for interpreting the config value for fields that use *bool. If a value 52 | // is present, that value is returned. Otherwise, the default is returned. 53 | func optionToBool(val *bool, def bool) bool { 54 | if val == nil { 55 | return def 56 | } 57 | return *val 58 | } 59 | 60 | // Are polling based updates enabled on this repo? 61 | func (r *Repo) PollUpdatesEnabled() bool { 62 | return optionToBool(r.EnablePollUpdates, defaultPollEnabled) 63 | } 64 | 65 | // Are push based updates enabled on this repo? 66 | func (r *Repo) PushUpdatesEnabled() bool { 67 | return optionToBool(r.EnablePushUpdates, defaultPushEnabled) 68 | } 69 | 70 | type Config struct { 71 | DbPath string `json:"dbpath"` 72 | Title string `json:"title"` 73 | Favicon *Favicon `json:"favicon"` 74 | Repos []*Repo `json:"repos"` 75 | MaxConcurrentIndexers int `json:"max-concurrent-indexers"` 76 | MaxConcurrentSearchers int `json:"max-concurrent-searchers"` 77 | MaxReposInFirstResult int `json:"max-repos-in-first-result"` 78 | MaxReposInNextResult int `json:"max-repos-in-next-result"` 79 | HealthCheckURI string `json:"health-check-uri"` 80 | InitSearch map[string]string `json:"init-search"` 81 | Ads []*string `json:"ads"` 82 | } 83 | 84 | type Favicon struct { 85 | Image []byte 86 | ModTime time.Time 87 | } 88 | 89 | func (f *Favicon) UnmarshalJSON(b []byte) error { 90 | if b == nil { 91 | return errors.New("Favicon: UnmarshalJSON on nil pointer") 92 | } 93 | unquoted := string(b[1 : len(b)-2]) 94 | data, err := base64.RawStdEncoding.DecodeString(unquoted) 95 | if err != nil { 96 | panic(err) 97 | } 98 | f.Image = append(((*f).Image)[0:0], data...) 99 | f.ModTime = time.Now() 100 | return nil 101 | } 102 | 103 | type ClientConfig struct { 104 | InitSearch map[string]string 105 | } 106 | 107 | // SecretMessage is just like json.RawMessage but it will not 108 | // marshal its value as JSON. This is to ensure that vcs-config 109 | // is not marshalled into JSON and sent to the UI. 110 | type SecretMessage []byte 111 | 112 | // This always marshals to an empty object.
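// For example, a repo declared like "SubversionWithCreds" in config-example.json
// keeps its username/password in vcs-config; when the repo list is marshalled
// for the UI (api/v1/repos), that field is emitted as "vcs-config": {}, so the
// credentials never reach the browser.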
113 | func (s *SecretMessage) MarshalJSON() ([]byte, error) { 114 | return []byte("{}"), nil 115 | } 116 | 117 | // See http://golang.org/pkg/encoding/json/#RawMessage.UnmarshalJSON 118 | func (s *SecretMessage) UnmarshalJSON(b []byte) error { 119 | if b == nil { 120 | return errors.New("SecretMessage: UnmarshalJSON on nil pointer") 121 | } 122 | *s = append((*s)[0:0], b...) 123 | return nil 124 | } 125 | 126 | // Get the JSON-encoded vcs-config for this repo. This returns nil if 127 | // the repo doesn't declare a vcs-config. 128 | func (r *Repo) VcsConfig() []byte { 129 | if r.VcsConfigMessage == nil { 130 | return nil 131 | } 132 | return *r.VcsConfigMessage 133 | } 134 | 135 | // Populate missing config values with default values. 136 | func initRepo(r *Repo) { 137 | if r.MsBetweenPolls == 0 { 138 | r.MsBetweenPolls = defaultMsBetweenPoll 139 | } 140 | 141 | if r.Vcs == "" { 142 | r.Vcs = defaultVcs 143 | } 144 | 145 | if r.UrlPattern == nil { 146 | r.UrlPattern = &UrlPattern{ 147 | BaseUrl: defaultBaseUrl, 148 | Anchor: defaultAnchor, 149 | } 150 | } else { 151 | if r.UrlPattern.BaseUrl == "" { 152 | r.UrlPattern.BaseUrl = defaultBaseUrl 153 | } 154 | 155 | if r.UrlPattern.Anchor == "" { 156 | r.UrlPattern.Anchor = defaultAnchor 157 | } 158 | } 159 | } 160 | 161 | // Populate missing config values with default values. 162 | func initConfig(c *Config) { 163 | if c.MaxConcurrentIndexers == 0 { 164 | c.MaxConcurrentIndexers = defaultMaxConcurrentIndexers 165 | } 166 | if c.MaxConcurrentSearchers == 0 { 167 | c.MaxConcurrentSearchers = defaultMaxConcurrentSearchers 168 | } 169 | if c.MaxReposInFirstResult == 0 { 170 | c.MaxReposInFirstResult = defaultMaxReposInFirstResult 171 | } 172 | if c.MaxReposInNextResult == 0 { 173 | c.MaxReposInNextResult = defaultMaxReposInNextResult 174 | } 175 | 176 | if c.HealthCheckURI == "" { 177 | c.HealthCheckURI = defaultHealthCheckURI 178 | } 179 | } 180 | 181 | func (c *Config) LoadFromFile(filename string) error { 182 | r, err := os.Open(filename) 183 | if err != nil { 184 | return err 185 | } 186 | defer r.Close() 187 | 188 | if err := json.NewDecoder(r).Decode(c); err != nil { 189 | return err 190 | } 191 | 192 | if c.Title == "" { 193 | c.Title = defaultTitle 194 | } 195 | 196 | if !filepath.IsAbs(c.DbPath) { 197 | path, err := filepath.Abs( 198 | filepath.Join(filepath.Dir(filename), c.DbPath)) 199 | if err != nil { 200 | return err 201 | } 202 | c.DbPath = path 203 | } 204 | 205 | for _, repo := range c.Repos { 206 | initRepo(repo) 207 | } 208 | 209 | initConfig(c) 210 | 211 | return nil 212 | } 213 | 214 | func (c *Config) ToJsonString() (string, error) { 215 | client := ClientConfig{c.InitSearch} 216 | b, err := json.Marshal(client) 217 | if err != nil { 218 | return "", err 219 | } 220 | 221 | return string(b), nil 222 | } 223 | func get(dict map[string]string, key, dflt string) string { 224 | if value, ok := dict[key]; ok { 225 | return value 226 | } 227 | return dflt 228 | } 229 | 230 | func (c *Config) ToOpenSearchParams() (string, error) { 231 | // This must be the same as in App.jsx (see const initParams = ...)
232 | // Exception is for InitSearch.q which is not used here 233 | i := get(c.InitSearch, "i", "nope") 234 | files := get(c.InitSearch, "files", "") 235 | excludeFiles := get(c.InitSearch, "excludeFiles", "") 236 | repos := get(c.InitSearch, "repos", ".*") 237 | params := url.Values{} 238 | params.Add("i", i) 239 | params.Add("files", files) 240 | params.Add("excludeFiles", excludeFiles) 241 | params.Add("repos", repos) 242 | 243 | return params.Encode(), nil 244 | } 245 | -------------------------------------------------------------------------------- /config/config_test.go: -------------------------------------------------------------------------------- 1 | package config 2 | 3 | import ( 4 | "path/filepath" 5 | "runtime" 6 | "testing" 7 | 8 | "github.com/itpp-labs/hound/vcs" 9 | ) 10 | 11 | const exampleConfigFile = "config-example.json" 12 | 13 | func rootDir() string { 14 | _, file, _, _ := runtime.Caller(0) 15 | return filepath.Join(filepath.Dir(file), "..") 16 | } 17 | 18 | // Test that we can parse the example config file. This ensures that as we 19 | // add examples, we don't muck them up. 20 | func TestExampleConfigsAreValid(t *testing.T) { 21 | var cfg Config 22 | if err := cfg.LoadFromFile(filepath.Join(rootDir(), exampleConfigFile)); err != nil { 23 | t.Fatalf("Unable to parse %s: %s", exampleConfigFile, err) 24 | } 25 | 26 | if len(cfg.Repos) == 0 { 27 | t.Fatal("config has no repos") 28 | } 29 | 30 | // Ensure that each of the declared vcs's are legit 31 | for _, repo := range cfg.Repos { 32 | _, err := vcs.New(repo.Vcs, repo.VcsConfig()) 33 | if err != nil { 34 | t.Fatal(err) 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /default-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "dbpath" : "db", 3 | "init-search": { 4 | "excludeFiles": "ui/bindata.go" 5 | }, 6 | "repos" : [ 7 | { 8 | "name" : "Hound", 9 | "url" : "https://github.com/itpp-labs/hound.git" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /index/grep.go: -------------------------------------------------------------------------------- 1 | package index 2 | 3 | import ( 4 | "bytes" 5 | "compress/gzip" 6 | "io" 7 | "os" 8 | 9 | "github.com/itpp-labs/hound/codesearch/regexp" 10 | ) 11 | 12 | var nl = []byte{'\n'} 13 | 14 | type grepper struct { 15 | buf []byte 16 | } 17 | 18 | func countLines(b []byte) int { 19 | n := 0 20 | for { 21 | i := bytes.IndexByte(b, '\n') 22 | if i < 0 { 23 | break 24 | } 25 | n++ 26 | b = b[i+1:] 27 | } 28 | return n 29 | } 30 | 31 | func (g *grepper) grepFile(filename string, re *regexp.Regexp, 32 | fn func(line []byte, lineno int) (bool, error)) error { 33 | r, err := os.Open(filename) 34 | if err != nil { 35 | return err 36 | } 37 | defer r.Close() 38 | 39 | c, err := gzip.NewReader(r) 40 | if err != nil { 41 | return err 42 | } 43 | defer c.Close() 44 | 45 | return g.grep(c, re, fn) 46 | } 47 | 48 | func (g *grepper) grep2File(filename string, re *regexp.Regexp, nctx int, 49 | fn func(line []byte, lineno int, before [][]byte, after [][]byte) (bool, error)) error { 50 | r, err := os.Open(filename) 51 | if err != nil { 52 | return err 53 | } 54 | defer r.Close() 55 | 56 | c, err := gzip.NewReader(r) 57 | if err != nil { 58 | return err 59 | } 60 | defer c.Close() 61 | 62 | return g.grep2(c, re, nctx, fn) 63 | } 64 | 65 | func (g *grepper) fillFrom(r io.Reader) ([]byte, error) { 66 | if g.buf == nil { 67 | g.buf = 
make([]byte, 1<<20) 68 | } 69 | 70 | off := 0 71 | for { 72 | n, err := io.ReadFull(r, g.buf[off:]) 73 | if err == io.ErrUnexpectedEOF || err == io.EOF { 74 | return g.buf[:off+n], nil 75 | } else if err != nil { 76 | return nil, err 77 | } 78 | 79 | // grow the storage 80 | buf := make([]byte, len(g.buf)*2) 81 | copy(buf, g.buf) 82 | g.buf = buf 83 | off += n 84 | } 85 | } 86 | 87 | func lastNLines(buf []byte, n int) [][]byte { 88 | if len(buf) == 0 || n == 0 { 89 | return nil 90 | } 91 | 92 | r := make([][]byte, n) 93 | for i := 0; i < n; i++ { 94 | m := bytes.LastIndex(buf, nl) 95 | if m < 0 { 96 | if len(buf) == 0 { 97 | return r[n-i:] 98 | } 99 | r[n-i-1] = buf 100 | return r[n-i-1:] 101 | } 102 | r[n-i-1] = buf[m+1:] 103 | buf = buf[:m] 104 | } 105 | 106 | return r 107 | } 108 | 109 | func firstNLines(buf []byte, n int) [][]byte { 110 | if len(buf) == 0 || n == 0 { 111 | return nil 112 | } 113 | 114 | r := make([][]byte, n) 115 | for i := 0; i < n; i++ { 116 | m := bytes.Index(buf, nl) 117 | if m < 0 { 118 | if len(buf) == 0 { 119 | return r[:i] 120 | } 121 | r[i] = buf 122 | return r[:i+1] 123 | } 124 | r[i] = buf[:m] 125 | buf = buf[m+1:] 126 | } 127 | return r 128 | } 129 | 130 | // TODO(knorton): This is still being tested. This is a grep that supports context lines. Unlike the version 131 | // in codesearch, this one does not operate on chunks. The downside is that we have to have the whole file 132 | // in memory to do the grep. Fortunately, we limit the size of files that get indexed anyway. 10M files tend 133 | // to not be source code. 134 | func (g *grepper) grep2( 135 | r io.Reader, 136 | re *regexp.Regexp, 137 | nctx int, 138 | fn func(line []byte, lineno int, before [][]byte, after [][]byte) (bool, error)) error { 139 | 140 | buf, err := g.fillFrom(r) 141 | if err != nil { 142 | return err 143 | } 144 | 145 | lineno := 0 146 | for { 147 | if len(buf) == 0 { 148 | return nil 149 | } 150 | 151 | m := re.Match(buf, true, true) 152 | if m < 0 { 153 | return nil 154 | } 155 | 156 | // start of matched line. 157 | str := bytes.LastIndex(buf[:m], nl) + 1 158 | 159 | //end of previous line 160 | endl := str - 1 161 | if endl < 0 { 162 | endl = 0 163 | } 164 | 165 | //end of current line 166 | end := m + 1 167 | if end > len(buf) { 168 | end = len(buf) 169 | } 170 | 171 | lineno += countLines(buf[:str]) 172 | 173 | more, err := fn( 174 | bytes.TrimRight(buf[str:end], "\n"), 175 | lineno+1, 176 | lastNLines(buf[:endl], nctx), 177 | firstNLines(buf[end:], nctx)) 178 | if err != nil { 179 | return err 180 | } 181 | if !more { 182 | return nil 183 | } 184 | 185 | lineno++ 186 | buf = buf[end:] 187 | } 188 | } 189 | 190 | // This nonsense is adapted from https://code.google.com/p/codesearch/source/browse/regexp/match.go#399 191 | // and I assume it is a mess to make it faster, but I would like to try a much simpler cleaner version. 
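// In short: grep reads the input in chunks (the buffer starts at 1<<20 bytes),
// matches the regexp against the complete lines of each chunk, and calls fn with
// every matching line and its 1-based line number; any trailing partial line is
// copied to the front of the buffer and completed by the next read. Returning
// false from fn stops the scan early.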
192 | func (g *grepper) grep(r io.Reader, re *regexp.Regexp, fn func(line []byte, lineno int) (bool, error)) error { 193 | if g.buf == nil { 194 | g.buf = make([]byte, 1<<20) 195 | } 196 | 197 | var ( 198 | buf = g.buf[:0] 199 | lineno = 1 200 | beginText = true 201 | endText = false 202 | ) 203 | 204 | for { 205 | n, err := io.ReadFull(r, buf[len(buf):cap(buf)]) 206 | buf = buf[:len(buf)+n] 207 | end := len(buf) 208 | if err == nil { 209 | end = bytes.LastIndex(buf, nl) + 1 210 | } else { 211 | endText = true 212 | } 213 | chunkStart := 0 214 | for chunkStart < end { 215 | m1 := re.Match(buf[chunkStart:end], beginText, endText) + chunkStart 216 | beginText = false 217 | if m1 < chunkStart { 218 | break 219 | } 220 | lineStart := bytes.LastIndex(buf[chunkStart:m1], nl) + 1 + chunkStart 221 | lineEnd := m1 + 1 222 | if lineEnd > end { 223 | lineEnd = end 224 | } 225 | lineno += countLines(buf[chunkStart:lineStart]) 226 | line := buf[lineStart:lineEnd] 227 | more, err := fn(line, lineno) 228 | if err != nil { 229 | return err 230 | } 231 | if !more { 232 | return nil 233 | } 234 | lineno++ 235 | chunkStart = lineEnd 236 | } 237 | if err == nil { 238 | lineno += countLines(buf[chunkStart:end]) 239 | } 240 | 241 | n = copy(buf, buf[end:]) 242 | buf = buf[:n] 243 | if len(buf) == 0 && err != nil { 244 | if err != io.EOF && err != io.ErrUnexpectedEOF { 245 | return err 246 | } 247 | return nil 248 | } 249 | } 250 | 251 | return nil 252 | } 253 | -------------------------------------------------------------------------------- /index/grep_test.go: -------------------------------------------------------------------------------- 1 | package index 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "strings" 7 | "testing" 8 | 9 | "github.com/itpp-labs/hound/codesearch/regexp" 10 | ) 11 | 12 | var ( 13 | subjA = []byte("first\nsecond\nthird\nfourth\nfifth\nsixth") 14 | subjB = []byte("\n") 15 | subjC = []byte("\n\n\n\nfoo\nbar\n\nbaz") 16 | ) 17 | 18 | func formatLines(lines []string) string { 19 | return fmt.Sprintf("[%s]", strings.Join(lines, ",")) 20 | } 21 | 22 | func formatLinesFromBytes(lines [][]byte) string { 23 | n := len(lines) 24 | s := make([]string, n) 25 | for i := 0; i < n; i++ { 26 | s[i] = string(lines[i]) 27 | } 28 | return formatLines(s) 29 | } 30 | 31 | func assertLinesMatch(t *testing.T, lines [][]byte, expected []string) { 32 | if len(lines) != len(expected) { 33 | t.Errorf("lines do not match: %s vs %s", 34 | formatLinesFromBytes(lines), 35 | formatLines(expected)) 36 | return 37 | } 38 | for i, str := range expected { 39 | if str != string(lines[i]) { 40 | t.Errorf("lines do not match: %s vs %s", 41 | formatLinesFromBytes(lines), 42 | formatLines(expected)) 43 | } 44 | } 45 | } 46 | 47 | func GenBuf(n int) []byte { 48 | b := make([]byte, n) 49 | for i := 0; i < n; i++ { 50 | b[i] = byte(i) 51 | } 52 | return b 53 | } 54 | 55 | func TestFillFrom(t *testing.T) { 56 | var g grepper 57 | // this is to force buffer growth 58 | g.buf = make([]byte, 2) 59 | 60 | d := GenBuf(1024) 61 | b, _ := g.fillFrom(bytes.NewBuffer(d)) 62 | if !bytes.Equal(d, b) { 63 | t.Errorf("filled buffer doesn't match original: len=%d & len=%d", len(d), len(b)) 64 | } 65 | } 66 | 67 | func TestFirstNLines(t *testing.T) { 68 | assertLinesMatch(t, firstNLines(subjA, 1), []string{ 69 | "first", 70 | }) 71 | 72 | assertLinesMatch(t, firstNLines(subjA, 2), []string{ 73 | "first", 74 | "second", 75 | }) 76 | 77 | assertLinesMatch(t, firstNLines(subjA, 6), []string{ 78 | "first", 79 | "second", 80 | "third", 81 | "fourth", 
82 | "fifth", 83 | "sixth", 84 | }) 85 | 86 | assertLinesMatch(t, firstNLines(subjB, 1), []string{ 87 | "", 88 | }) 89 | 90 | assertLinesMatch(t, firstNLines(subjB, 2), []string{ 91 | "", 92 | }) 93 | 94 | assertLinesMatch(t, firstNLines(subjC, 5), []string{ 95 | "", 96 | "", 97 | "", 98 | "", 99 | "foo", 100 | }) 101 | } 102 | 103 | func TestLastNLines(t *testing.T) { 104 | assertLinesMatch(t, lastNLines(subjA, 1), []string{ 105 | "sixth", 106 | }) 107 | 108 | assertLinesMatch(t, lastNLines(subjA, 2), []string{ 109 | "fifth", 110 | "sixth", 111 | }) 112 | 113 | assertLinesMatch(t, lastNLines(subjA, 6), []string{ 114 | "first", 115 | "second", 116 | "third", 117 | "fourth", 118 | "fifth", 119 | "sixth", 120 | }) 121 | 122 | assertLinesMatch(t, lastNLines(subjB, 1), []string{ 123 | "", 124 | }) 125 | 126 | assertLinesMatch(t, lastNLines(subjB, 2), []string{ 127 | "", 128 | }) 129 | 130 | assertLinesMatch(t, lastNLines(subjC, 5), []string{ 131 | "", 132 | "foo", 133 | "bar", 134 | "", 135 | "baz", 136 | }) 137 | } 138 | 139 | type match struct { 140 | line string 141 | no int 142 | } 143 | 144 | func aMatch(line string, no int) *match { 145 | return &match{ 146 | line: line, 147 | no: no, 148 | } 149 | } 150 | 151 | func formatMatches(matches []*match) string { 152 | str := make([]string, len(matches)) 153 | for i, match := range matches { 154 | str[i] = fmt.Sprintf("%s:%d", match.line, match.no) 155 | } 156 | return strings.Join(str, ",") 157 | } 158 | 159 | func assertMatchesMatch(t *testing.T, a, b []*match) { 160 | if len(a) != len(b) { 161 | t.Errorf("matches no match: %s vs %s", 162 | formatMatches(a), 163 | formatMatches(b)) 164 | return 165 | } 166 | 167 | for i, n := 0, len(a); i < n; i++ { 168 | if a[i].line != b[i].line || a[i].no != b[i].no { 169 | t.Errorf("matches no match: %s vs %s", 170 | formatMatches(a), 171 | formatMatches(b)) 172 | return 173 | } 174 | } 175 | } 176 | 177 | func assertGrepTest(t *testing.T, buf []byte, exp string, expects []*match) { 178 | re, err := regexp.Compile(exp) 179 | if err != nil { 180 | t.Error(err) 181 | return 182 | } 183 | 184 | var g grepper 185 | var m []*match 186 | if err := g.grep2(bytes.NewBuffer(buf), re, 0, 187 | func(line []byte, lineno int, before [][]byte, after [][]byte) (bool, error) { 188 | m = append(m, aMatch(string(line), lineno)) 189 | return true, nil 190 | }); err != nil { 191 | t.Error(err) 192 | return 193 | } 194 | 195 | assertMatchesMatch(t, m, expects) 196 | } 197 | 198 | func TestGrep(t *testing.T) { 199 | assertGrepTest(t, subjA, "s", []*match{ 200 | aMatch("first", 1), 201 | aMatch("second", 2), 202 | aMatch("sixth", 6), 203 | }) 204 | 205 | // BUG(knorton): rsc's regexp has bugs. 
206 | // assertGrepTest(t, subjB, "^$", []*match{ 207 | // aMatch("", 1), 208 | // }) 209 | 210 | assertGrepTest(t, subjB, "^", []*match{ 211 | aMatch("", 1), 212 | }) 213 | 214 | assertGrepTest(t, subjC, "^", []*match{ 215 | aMatch("", 1), 216 | aMatch("", 2), 217 | aMatch("", 3), 218 | aMatch("", 4), 219 | aMatch("foo", 5), 220 | aMatch("bar", 6), 221 | aMatch("", 7), 222 | aMatch("baz", 8), 223 | }) 224 | 225 | assertGrepTest(t, subjA, "th$", []*match{ 226 | aMatch("sixth", 6), 227 | }) 228 | } 229 | 230 | func assertContextTest(t *testing.T, buf []byte, exp string, ctx int, expectsBefore [][]string, expectsAfter [][]string) { 231 | re, err := regexp.Compile(exp) 232 | if err != nil { 233 | t.Error(err) 234 | return 235 | } 236 | 237 | var gotBefore [][][]byte 238 | var gotAfter [][][]byte 239 | var g grepper 240 | if err := g.grep2(bytes.NewBuffer(buf), re, ctx, 241 | func(line []byte, lineno int, before [][]byte, after [][]byte) (bool, error) { 242 | gotBefore = append(gotBefore, before) 243 | gotAfter = append(gotAfter, after) 244 | return true, nil 245 | }); err != nil { 246 | t.Error(err) 247 | return 248 | } 249 | 250 | if len(expectsBefore) != len(gotBefore) { 251 | t.Errorf("Before had %d lines, should have had %d", 252 | len(gotBefore), 253 | len(expectsBefore)) 254 | return 255 | } 256 | 257 | if len(expectsAfter) != len(gotAfter) { 258 | t.Errorf("After had %d lines, should have had %d", 259 | len(gotBefore), 260 | len(expectsBefore)) 261 | return 262 | } 263 | 264 | for i, n := 0, len(gotBefore); i < n; i++ { 265 | assertLinesMatch(t, gotBefore[i], expectsBefore[i]) 266 | } 267 | 268 | for i, n := 0, len(gotAfter); i < n; i++ { 269 | assertLinesMatch(t, gotAfter[i], expectsAfter[i]) 270 | } 271 | } 272 | 273 | func TestContext(t *testing.T) { 274 | assertContextTest(t, subjA, "third", 2, 275 | [][]string{ 276 | []string{"first", "second"}, 277 | }, [][]string{ 278 | []string{"fourth", "fifth"}, 279 | }) 280 | 281 | assertContextTest(t, subjA, "third", 3, 282 | [][]string{ 283 | []string{"first", "second"}, 284 | }, [][]string{ 285 | []string{"fourth", "fifth", "sixth"}, 286 | }) 287 | 288 | assertContextTest(t, subjA, "first", 2, 289 | [][]string{ 290 | []string{}, 291 | }, [][]string{ 292 | []string{"second", "third"}, 293 | }) 294 | } 295 | -------------------------------------------------------------------------------- /index/index_test.go: -------------------------------------------------------------------------------- 1 | package index 2 | 3 | import ( 4 | "io/ioutil" 5 | "os" 6 | "path/filepath" 7 | "runtime" 8 | "testing" 9 | ) 10 | 11 | const ( 12 | url = "https://www.etsy.com/" 13 | rev = "r420" 14 | ) 15 | 16 | func thisDir() string { 17 | _, file, _, _ := runtime.Caller(0) 18 | return filepath.Dir(file) 19 | } 20 | 21 | func buildIndex(url, rev string) (*IndexRef, error) { 22 | dir, err := ioutil.TempDir(os.TempDir(), "hound") 23 | if err != nil { 24 | return nil, err 25 | } 26 | 27 | var opt IndexOptions 28 | 29 | return Build(&opt, dir, thisDir(), url, "test", rev) 30 | } 31 | 32 | func TestSearch(t *testing.T) { 33 | // Build an index 34 | ref, err := buildIndex(url, rev) 35 | if err != nil { 36 | t.Fatal(err) 37 | } 38 | defer ref.Remove() 39 | 40 | // Make sure the metadata in the ref is good. 41 | if ref.Rev != rev { 42 | t.Fatalf("expected rev of %s, got %s", rev, ref.Rev) 43 | } 44 | 45 | if ref.Url != url { 46 | t.Fatalf("expected url of %s got %s", url, ref.Url) 47 | } 48 | 49 | // Make sure the ref can be opened. 
50 | idx, err := ref.Open() 51 | if err != nil { 52 | t.Fatal(err) 53 | } 54 | defer idx.Close() 55 | 56 | // Make sure we can carry out a search 57 | opt := &SearchOptions{} 58 | res, err := idx.PreSearch("5a1c0dac2d9b3ea4085b30dd14375c18eab993d5", opt) 59 | if err != nil { 60 | t.Fatal(err) 61 | } 62 | if _, err := idx.Search(res, opt); err != nil { 63 | t.Fatal(err) 64 | } 65 | } 66 | 67 | func TestRemove(t *testing.T) { 68 | ref, err := buildIndex(url, rev) 69 | if err != nil { 70 | t.Fatal(err) 71 | } 72 | 73 | if err := ref.Remove(); err != nil { 74 | t.Fatal(err) 75 | } 76 | 77 | if _, err := os.Stat(ref.Dir()); err == nil { 78 | t.Fatalf("Remove did not delete directory: %s", ref.Dir()) 79 | } 80 | } 81 | 82 | func TestRead(t *testing.T) { 83 | ref, err := buildIndex(url, rev) 84 | if err != nil { 85 | t.Fatal(err) 86 | } 87 | defer ref.Remove() 88 | 89 | r, err := Read(ref.Dir()) 90 | if err != nil { 91 | t.Fatal(err) 92 | } 93 | 94 | if r.Url != url { 95 | t.Fatalf("expected url of %s, got %s", url, r.Url) 96 | } 97 | 98 | if r.Rev != rev { 99 | t.Fatalf("expected rev of %s, got %s", rev, r.Rev) 100 | } 101 | 102 | idx, err := r.Open() 103 | if err != nil { 104 | t.Fatal(err) 105 | } 106 | defer idx.Close() 107 | } 108 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | // For a detailed explanation regarding each configuration property, visit: 2 | // https://jestjs.io/docs/en/configuration.html 3 | 4 | module.exports = { 5 | // All imported modules in your tests should be mocked automatically 6 | // automock: false, 7 | 8 | // Stop running tests after `n` failures 9 | // bail: 0, 10 | 11 | // Respect "browser" field in package.json when resolving modules 12 | // browser: false, 13 | 14 | // The directory where Jest should store its cached dependency information 15 | // cacheDirectory: "/tmp/jest_8hl", 16 | 17 | // Automatically clear mock calls and instances between every test 18 | clearMocks: true, 19 | 20 | // Indicates whether the coverage information should be collected while executing the test 21 | // collectCoverage: false, 22 | 23 | // An array of glob patterns indicating a set of files for which coverage information should be collected 24 | // collectCoverageFrom: undefined, 25 | 26 | // The directory where Jest should output its coverage files 27 | coverageDirectory: "coverage", 28 | 29 | // An array of regexp pattern strings used to skip coverage collection 30 | // coveragePathIgnorePatterns: [ 31 | // "/node_modules/" 32 | // ], 33 | 34 | // A list of reporter names that Jest uses when writing coverage reports 35 | // coverageReporters: [ 36 | // "json", 37 | // "text", 38 | // "lcov", 39 | // "clover" 40 | // ], 41 | 42 | // An object that configures minimum threshold enforcement for coverage results 43 | // coverageThreshold: undefined, 44 | 45 | // A path to a custom dependency extractor 46 | // dependencyExtractor: undefined, 47 | 48 | // Make calling deprecated APIs throw helpful error messages 49 | // errorOnDeprecated: false, 50 | 51 | // Force coverage collection from ignored files using an array of glob patterns 52 | // forceCoverageMatch: [], 53 | 54 | // A path to a module which exports an async function that is triggered once before all test suites 55 | // globalSetup: undefined, 56 | 57 | // A path to a module which exports an async function that is triggered once after all test suites 58 | // globalTeardown: undefined, 59 | 
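// As an illustrative example (not part of the generated defaults), coverage for
// the UI sources could be collected with something like:
// collectCoverage: true,
// collectCoverageFrom: ["ui/assets/js/**/*.{js,jsx}"],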
60 | // A set of global variables that need to be available in all test environments 61 | // globals: {}, 62 | 63 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 64 | // maxWorkers: "50%", 65 | 66 | // An array of directory names to be searched recursively up from the requiring module's location 67 | // moduleDirectories: [ 68 | // "node_modules" 69 | // ], 70 | 71 | // An array of file extensions your modules use 72 | // moduleFileExtensions: [ 73 | // "js", 74 | // "json", 75 | // "jsx", 76 | // "ts", 77 | // "tsx", 78 | // "node" 79 | // ], 80 | 81 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 82 | // moduleNameMapper: {}, 83 | 84 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 85 | // modulePathIgnorePatterns: [], 86 | 87 | // Activates notifications for test results 88 | // notify: false, 89 | 90 | // An enum that specifies notification mode. Requires { notify: true } 91 | // notifyMode: "failure-change", 92 | 93 | // A preset that is used as a base for Jest's configuration 94 | // preset: undefined, 95 | 96 | // Run tests from one or more projects 97 | // projects: undefined, 98 | 99 | // Use this configuration option to add custom reporters to Jest 100 | // reporters: undefined, 101 | 102 | // Automatically reset mock state between every test 103 | // resetMocks: false, 104 | 105 | // Reset the module registry before running each individual test 106 | // resetModules: false, 107 | 108 | // A path to a custom resolver 109 | // resolver: undefined, 110 | 111 | // Automatically restore mock state between every test 112 | // restoreMocks: false, 113 | 114 | // The root directory that Jest should scan for tests and modules within 115 | // rootDir: undefined, 116 | 117 | // A list of paths to directories that Jest should use to search for files in 118 | // roots: [ 119 | // "" 120 | // ], 121 | 122 | // Allows you to use a custom runner instead of Jest's default test runner 123 | // runner: "jest-runner", 124 | 125 | // The paths to modules that run some code to configure or set up the testing environment before each test 126 | // setupFiles: [], 127 | 128 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 129 | // setupFilesAfterEnv: [], 130 | 131 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 132 | // snapshotSerializers: [], 133 | 134 | // The test environment that will be used for testing 135 | // testEnvironment: "jest-environment-jsdom", 136 | 137 | // Options that will be passed to the testEnvironment 138 | // testEnvironmentOptions: {}, 139 | 140 | // Adds a location field to test results 141 | // testLocationInResults: false, 142 | 143 | // The glob patterns Jest uses to detect test files 144 | // testMatch: [ 145 | // "**/__tests__/**/*.[jt]s?(x)", 146 | // "**/?(*.)+(spec|test).[tj]s?(x)" 147 | // ], 148 | 149 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 150 | // testPathIgnorePatterns: [ 151 | // "/node_modules/" 152 | // ], 153 | 154 | // The regexp pattern or array of patterns that Jest uses to detect test files 155 | // testRegex: [], 156 | 157 | // This option allows the 
use of a custom results processor 158 | // testResultsProcessor: undefined, 159 | 160 | // This option allows use of a custom test runner 161 | // testRunner: "jasmine2", 162 | 163 | // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href 164 | // testURL: "http://localhost", 165 | 166 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 167 | // timers: "real", 168 | 169 | // A map from regular expressions to paths to transformers 170 | // transform: undefined, 171 | 172 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 173 | // transformIgnorePatterns: [ 174 | // "/node_modules/" 175 | // ], 176 | 177 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 178 | // unmockedModulePathPatterns: undefined, 179 | 180 | // Indicates whether each individual test should be reported during the run 181 | // verbose: undefined, 182 | 183 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 184 | // watchPathIgnorePatterns: [], 185 | 186 | // Whether to use watchman for file crawling 187 | // watchman: true, 188 | }; 189 | -------------------------------------------------------------------------------- /misc/hound.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Hound Code Search and Indexing Daemon 3 | After=network.target 4 | After=httpd.service 5 | 6 | [Service] 7 | Type=simple 8 | User=nobody 9 | Group=nobody 10 | ExecStart=/usr/local/hound/bin/houndd 11 | Restart=always 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hound", 3 | "version": "1.0.0", 4 | "description": "Hound", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "jest", 8 | "format": "pretty-quick --staged --pattern '**/*.*(js|jsx)'" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/itpp-labs/hound.git" 13 | }, 14 | "keywords": [], 15 | "author": "", 16 | "license": "ISC", 17 | "bugs": { 18 | "url": "https://github.com/itpp-labs/hound/issues" 19 | }, 20 | "homepage": "https://github.com/itpp-labs/hound#readme", 21 | "devDependencies": { 22 | "@babel/core": "^7.9.6", 23 | "@babel/preset-env": "^7.9.6", 24 | "@babel/preset-react": "^7.9.4", 25 | "babel-loader": "^8.1.0", 26 | "jest": "^25.3.0", 27 | "prettier": "^2.0.4", 28 | "pretty-quick": "^2.0.1", 29 | "create-react-class": "^15.6.3", 30 | "react": "^16.13.1", 31 | "react-dom": "^16.13.1", 32 | "react-select": "^3.1.0", 33 | "reqwest": "^2.0.5", 34 | "webpack": "^4.43.0", 35 | "webpack-cli": "^3.3.11", 36 | "webpack-dev-server": "^3.10.3" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /screen_capture.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/screen_capture.gif -------------------------------------------------------------------------------- /ui/assets/css/octicons/LICENSE.txt: -------------------------------------------------------------------------------- 1 | (c) 
2012-2014 GitHub 2 | 3 | When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) 4 | 5 | Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) 6 | Applies to all font files 7 | 8 | Code License: MIT (http://choosealicense.com/licenses/mit/) 9 | Applies to all other files 10 | -------------------------------------------------------------------------------- /ui/assets/css/octicons/README.md: -------------------------------------------------------------------------------- 1 | If you intend to install Octicons locally, install `octicons-local.ttf`. It should appear as “github-octicons” in your font list. It is specially designed not to conflict with GitHub's web fonts. 2 | -------------------------------------------------------------------------------- /ui/assets/css/octicons/octicons-local.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/css/octicons/octicons-local.ttf -------------------------------------------------------------------------------- /ui/assets/css/octicons/octicons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/css/octicons/octicons.eot -------------------------------------------------------------------------------- /ui/assets/css/octicons/octicons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/css/octicons/octicons.ttf -------------------------------------------------------------------------------- /ui/assets/css/octicons/octicons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/css/octicons/octicons.woff -------------------------------------------------------------------------------- /ui/assets/excluded_files.tpl.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Hound - Excluded Files 6 | 7 | 8 | 9 | 10 |
11 | {{ .Source }} 12 | 13 | 14 | -------------------------------------------------------------------------------- /ui/assets/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/favicon.ico -------------------------------------------------------------------------------- /ui/assets/images/busy.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/itpp-labs/hound/69cff672484dd54241b82e9c38f768fc25416f94/ui/assets/images/busy.gif -------------------------------------------------------------------------------- /ui/assets/index.tpl.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {{ .Title }} 7 | 8 | 9 | 12 | 13 | 14 |
15 | {{ .Promo }} 16 |
17 |
18 |
19 |
20 |
21 | 22 | 25 | {{ .Source }} 26 | 27 | 28 | -------------------------------------------------------------------------------- /ui/assets/js/common.test.js: -------------------------------------------------------------------------------- 1 | import { ExpandVars, UrlToRepo } from "./common"; 2 | 3 | describe("ExpandVars", () => { 4 | test("Replaces template variables with their values", () => { 5 | const template = "I am trying to {verb} my {noun}"; 6 | const values = { verb: "wash", noun: "dishes" }; 7 | expect(ExpandVars(template, values)).toBe( 8 | "I am trying to wash my dishes" 9 | ); 10 | }); 11 | 12 | test("Doesn't replace unlisted variables", () => { 13 | const template = "Get the {expletive} out of my {noun}"; 14 | const values1 = { noun: "stamp collection" }; 15 | 16 | expect(ExpandVars(template, values1)).toBe( 17 | "Get the {expletive} out of my stamp collection" 18 | ); 19 | expect(ExpandVars(template, {})).toBe(template); 20 | }); 21 | }); 22 | -------------------------------------------------------------------------------- /ui/assets/js/components/ExcludedFiles/ExcludedRow.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { UrlToRepo } from '../../helpers/common'; 3 | 4 | export const ExcludedRow = (props) => { 5 | 6 | const { file: { Filename, Reason }, repo } = props; 7 | 8 | const url = UrlToRepo(repo, Filename); 9 | 10 | return ( 11 | 12 | 13 | { Filename } 14 | 15 | { Reason } 16 | 17 | ); 18 | }; 19 | -------------------------------------------------------------------------------- /ui/assets/js/components/ExcludedFiles/ExcludedTable.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { ExcludedRow } from './ExcludedRow'; 3 | 4 | export const ExcludedTable = (props) => { 5 | 6 | const { files, searching, repo } = props; 7 | 8 | if (searching) { 9 | return ( 10 |
11 |
Searching...
12 |
13 | ); 14 | } 15 | 16 | const rows = files.map((file, index) => ( 17 | 22 | )); 23 | 24 | return ( 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | { rows } 33 |
FilenameReason
34 | ); 35 | }; 36 | -------------------------------------------------------------------------------- /ui/assets/js/components/ExcludedFiles/FilterableExcludedFiles.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | import reqwest from 'reqwest'; 3 | import { RepoList } from './RepoList'; 4 | import { ExcludedTable } from './ExcludedTable' 5 | 6 | export const FilterableExcludedFiles = () => { 7 | 8 | const [ files, setFiles ] = useState([]); 9 | const [ repos, setRepos ] = useState([]); 10 | const [ repo, setRepo ] = useState(null); 11 | const [ searching, setSearching ] = useState(false); 12 | 13 | useEffect(() => { 14 | 15 | reqwest({ 16 | url: 'api/v1/repos', 17 | type: 'json', 18 | success (data) { 19 | setRepos(data); 20 | }, 21 | error (xhr, status, err) { 22 | // TODO(knorton): Fix these 23 | console.error(err); 24 | } 25 | }); 26 | 27 | }, []); 28 | 29 | const clickOnRepo = (repo) => { 30 | 31 | setSearching(true); 32 | setRepo(repos[repo]); 33 | 34 | reqwest({ 35 | url: 'api/v1/excludes', 36 | data: { repo: repo }, 37 | type: 'json', 38 | success (data) { 39 | setSearching(false); 40 | setFiles(data); 41 | }, 42 | error (xhr, status, err) { 43 | // TODO(knorton): Fix these 44 | console.error(err); 45 | } 46 | }); 47 | }; 48 | 49 | return ( 50 |
51 | Home 52 |

Excluded Files

53 | 54 |
55 | 56 | 57 |
58 |
59 | ); 60 | }; 61 | -------------------------------------------------------------------------------- /ui/assets/js/components/ExcludedFiles/RepoButton.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | export const RepoButton = (props) => { 4 | const { repo, currentRepo, onRepoClick } = props; 5 | return ( 6 | 9 | ); 10 | }; 11 | -------------------------------------------------------------------------------- /ui/assets/js/components/ExcludedFiles/RepoList.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { RepoButton } from './RepoButton'; 3 | 4 | export const RepoList = (props) => { 5 | 6 | const { repo, repos, onRepoClick } = props; 7 | 8 | const reposBlock = repos.map((item, index) => ( 9 | 15 | )); 16 | 17 | return ( 18 |
19 | { reposBlock } 20 |
21 | ); 22 | }; 23 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/App.jsx: -------------------------------------------------------------------------------- 1 | import React, { Fragment, useState, useEffect } from 'react'; 2 | import { ParamsFromUrl } from '../../utils'; 3 | import { Model } from '../../helpers/Model'; 4 | import { SearchBar } from './SearchBar'; 5 | import { ResultView } from './ResultView'; 6 | import { SelectionTooltip } from "./SelectionTooltip"; 7 | 8 | export const App = function (props) { 9 | 10 | const [ query, setQuery ] = useState(''); 11 | const [ ignoreCase, setIgnoreCase ] = useState('nope'); 12 | const [ files, setFiles ] = useState(''); 13 | const [ excludeFiles, setExcludeFiles ] = useState(''); 14 | const [ repos, setRepos ] = useState([]); 15 | const [ reposRE, setReposRE ] = useState(''); 16 | const [ allRepos, setAllRepos ] = useState([]); 17 | const [ stats, setStats ] = useState(''); 18 | const [ reposPagination, setReposPagination ] = useState(null); 19 | const [ results, setResults ] = useState(null); 20 | const [ error, setError ] = useState(null); 21 | 22 | useEffect(() => { 23 | 24 | const config = Model.config; 25 | const InitSearch = config.InitSearch || {}; 26 | // This must be the same as on server side (see config.go > ToOpenSearchParams) 27 | const initParams = { 28 | q: InitSearch.q || '', 29 | i: InitSearch.i || 'nope', 30 | files: InitSearch.files || '', 31 | excludeFiles: InitSearch.excludeFiles || '', 32 | repos: InitSearch.repos || '.*' 33 | } 34 | const urlParams = ParamsFromUrl(initParams); 35 | setQuery(urlParams.q); 36 | setIgnoreCase(urlParams.i); 37 | setFiles(urlParams.files); 38 | setExcludeFiles(urlParams.excludeFiles); 39 | setReposRE(urlParams.repos); 40 | 41 | Model.didLoadRepos.tap((model, allRepos) => { 42 | // If all repos are selected, don't show any selected. 
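// (An empty selection is not a problem: Model.Search treats an empty repos
// value as '*', i.e. search all repositories.)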
43 | if (model.ValidRepos(repos).length === model.RepoCount()) { 44 | setRepos([]); 45 | } 46 | setAllRepos(Object.keys(allRepos)); 47 | }); 48 | 49 | Model.didSearch.tap((model, results, stats, reposPagination) => { 50 | setStats(stats); 51 | setResults(results); 52 | setReposPagination(reposPagination); 53 | setError(null); 54 | }); 55 | 56 | Model.didLoadMore.tap((model, repo, results) => { 57 | setResults([...results]); 58 | setError(null); 59 | }); 60 | 61 | Model.didLoadOtherRepos.tap((model, results, reposPagination) => { 62 | setResults(results); 63 | setReposPagination(reposPagination); 64 | setError(null); 65 | }); 66 | 67 | Model.didError.tap((model, error) => { 68 | setResults(null); 69 | setError(error); 70 | }); 71 | 72 | window.addEventListener('popstate', (e) => { 73 | const urlParams = ParamsFromUrl(); 74 | if ( urlParams.q !== query ) { setQuery(urlParams.q); } 75 | if ( urlParams.i !== ignoreCase ) { setIgnoreCase(urlParams.i) } 76 | if ( urlParams.files !== files ) { setFiles(urlParams.files) } 77 | if ( urlParams.excludeFiles !== excludeFiles ) { setExcludeFiles(urlParams.excludeFiles) } 78 | if ( urlParams.repos !== reposRE ) { 79 | setReposRE(urlParams.repos) 80 | setRepos([]) 81 | } 82 | Model.Search(urlParams); 83 | }); 84 | 85 | }, []); 86 | 87 | const updateHistory = (params) => { 88 | const path = `${ location.pathname }` 89 | + `?q=${ encodeURIComponent(params.q) }` 90 | + `&i=${ encodeURIComponent(params.i) }` 91 | + `&files=${ encodeURIComponent(params.files) }` 92 | + `&excludeFiles=${ encodeURIComponent(params.excludeFiles) }` 93 | + `&repos=${ encodeURIComponent(params.repos) }`; 94 | history.pushState({ path: path }, '', path); 95 | }; 96 | 97 | const onSearchRequested = (params) => { 98 | updateHistory(params); 99 | if ( params.q !== query ) { setQuery(params.q); } 100 | if ( params.i !== ignoreCase ) { setIgnoreCase(params.i) } 101 | if ( params.files !== files ) { setFiles(params.files) } 102 | if ( params.excludeFiles !== excludeFiles ) { setExcludeFiles(params.excludeFiles) } 103 | if ( params.repos !== reposRE ) { setReposRE(params.repos) } 104 | setResults(null); 105 | setReposPagination(null); 106 | Model.Search(params); 107 | }; 108 | 109 | return ( 110 | 111 | 122 | 129 | 130 | 131 | ); 132 | }; 133 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/File.jsx: -------------------------------------------------------------------------------- 1 | import React, { createRef, useState } from 'react'; 2 | import { CoalesceMatches, ContentFor } from '../../utils'; 3 | import { Model } from '../../helpers/Model'; 4 | import { Match } from './Match'; 5 | 6 | export class File extends React.Component { 7 | constructor(props) { 8 | super(props) 9 | this.state = {showContent: true} 10 | this.textArea = createRef(null) 11 | this.toggleContent = this.toggleContent.bind(this) 12 | this.copyFilepath = this.copyFilepath.bind(this) 13 | } 14 | openOrClose(to_open) { 15 | this.setState({'showContent': to_open}) 16 | } 17 | toggleContent() { 18 | // console.log('toggleContent') 19 | this.setState({'showContent': !this.state.showContent}) 20 | } 21 | copyFilepath(evt) { 22 | evt.preventDefault() 23 | evt.stopPropagation() 24 | console.log(evt) 25 | this.textArea.current.select() 26 | document.execCommand('copy') 27 | } 28 | 29 | render (){ 30 | const filename = this.props.match.Filename; 31 | const blocks = CoalesceMatches(this.props.match.Matches); 32 | 33 | const matches = blocks.map((block, index) => 
( 34 | 42 | )); 43 | 44 | return ( 45 |
46 | 52 |
53 | { matches } 54 |
55 | 56 |
57 | ) 58 | } 59 | 60 | }; 61 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/FilesView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Model } from '../../helpers/Model'; 3 | import { File } from './File'; 4 | 5 | export const FilesView = (props) => { 6 | 7 | const { matches, rev, repo, regexp, totalMatches, filesCollection } = props; 8 | 9 | const onLoadMore = () => Model.LoadMore(repo); 10 | 11 | const files = matches.map((match, index) => { 12 | return ( 13 | filesCollection[index] = ref } 16 | repo={ repo } 17 | rev={ rev } 18 | match={ match } 19 | regexp={ regexp } 20 | /> 21 | ) 22 | }); 23 | 24 | const more = (matches.length < totalMatches) 25 | ? ( 26 | 29 | ) 30 | : ''; 31 | 32 | return ( 33 |
34 | { files } 35 | { more } 36 |
37 | ); 38 | 39 | }; 40 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/Line.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Model } from '../../helpers/Model'; 3 | import { ContentFor } from '../../utils'; 4 | 5 | export const Line = (props) => { 6 | 7 | const { line, rev, repo, filename, regexp } = props; 8 | const content = ContentFor(Model.repos[ repo ], line, regexp); 9 | 10 | return ( 11 | 20 | ); 21 | 22 | }; 23 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/Match.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Line } from './Line'; 3 | 4 | export const Match = (props) => { 5 | 6 | const { block, repo, regexp, rev, filename } = props; 7 | 8 | const lines = block.map((line, index) => { 9 | return ( 10 | 18 | ); 19 | }); 20 | 21 | return ( 22 |
23 | { lines } 24 |
25 | ); 26 | 27 | }; 28 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/Repo.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Model } from '../../helpers/Model'; 3 | import { FilesView } from './FilesView'; 4 | 5 | export class Repo extends React.Component { 6 | constructor(props) { 7 | super(props) 8 | this.state = {repoOpen: true} 9 | this.filesCollection = {} 10 | this.toggleFiles = this.toggleFiles.bind(this) 11 | } 12 | 13 | openOrCloseFiles(to_open) { 14 | this.setState({ 15 | repoOpen: to_open 16 | }) 17 | for (let index in this.filesCollection) { 18 | let f = this.filesCollection[index] 19 | f.openOrClose(to_open) 20 | } 21 | } 22 | toggleFiles() { 23 | this.openOrCloseFiles(!this.state.repoOpen) 24 | } 25 | openFiles () { 26 | this.openOrCloseFiles(true) 27 | } 28 | closeFiles () { 29 | this.openOrCloseFiles(false) 30 | } 31 | 32 | render () { 33 | return ( 34 |
35 |
36 | 37 | { Model.NameForRepo(this.props.repo) } 38 | 39 |
40 | 49 |
50 | ) 51 | } 52 | } 53 | 54 | 55 | /* 56 | 57 | export const Repo = (props) => { 58 | const { ref, repo, rev, matches, regexp, files, stateShow } = props; 59 | const [ status, setStatus] = stateShow 60 | 61 | const openOrCloseAll = (to_open) => { 62 | setStatus(to_open) 63 | for (let index in filesShowState) { 64 | let [state, setState] = reposShowState[index] 65 | if (to_open) { 66 | setState(true) 67 | } else { 68 | setState(false) 69 | } 70 | } 71 | } 72 | 73 | const toggleStatus = () => { 74 | openOrCloseAll(!status) 75 | } 76 | 77 | const filesShowState = {} 78 | 79 | return ( 80 |
81 |
82 | 83 | { Model.NameForRepo(repo) } 84 | 85 |
86 | 94 |
95 | ) 96 | } 97 | 98 | import React from 'react'; 99 | import { Model } from '../../helpers/Model'; 100 | import { FilesView } from './FilesView'; 101 | 102 | class Repo extends React.Component { 103 | constructor(props) { 104 | super(props); 105 | this.state = {visible: true}; 106 | } 107 | 108 | // const { ref, repo, rev, matches, regexp, files } = props; 109 | // const [ showContent, setShowContent] = useState(true); 110 | 111 | toggleContent() { 112 | this.setState({ 113 | visible: !this.state.visible 114 | }) 115 | } 116 | open () { 117 | this.setState({ 118 | visible: true 119 | }) 120 | } 121 | close () { 122 | this.setState({ 123 | visible: false 124 | }) 125 | } 126 | 127 | render () { 128 | return ( 129 |
130 |
131 | 132 | { Model.NameForRepo(repo) } 133 | 134 |
135 | 142 |
143 | ) 144 | } 145 | } 146 | 147 | 148 | */ 149 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/ResultView.jsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Model } from '../../helpers/Model'; 3 | import { FilesView } from './FilesView'; 4 | import { Repo } from './Repo'; 5 | 6 | export const ResultView = (props) => { 7 | 8 | const { query, ignoreCase, results, reposPagination, error } = props; 9 | const isLoading = results === null && query; 10 | const noResults = !!results && results.length === 0; 11 | 12 | const renderError = (message, hint) => { 13 | return ( 14 |
15 | ERROR:{ message } 16 |
17 | ) 18 | } 19 | 20 | if (error) { 21 | return renderError(error) 22 | } 23 | 24 | let regexp 25 | try { 26 | regexp = new RegExp(query.trim(), ignoreCase.trim() === 'fosho' && 'ig' || 'g'); 27 | } catch (exc) { 28 | return renderError(exc.message) 29 | } 30 | 31 | if (!isLoading && noResults) { 32 | // TODO(knorton): We need something better here. :-( 33 | return ( 34 |
35 | “Nothing for you, Dawg.”
0 results
36 |
37 | ); 38 | } 39 | 40 | const openOrCloseAll = (to_open) => { 41 | for (let index in reposRefs) { 42 | let repo = reposRefs[index] 43 | if (to_open) { 44 | repo.openFiles() 45 | } else { 46 | repo.closeFiles() 47 | } 48 | } 49 | } 50 | const openAll = () => { 51 | openOrCloseAll(true) 52 | } 53 | const closeAll = () => { 54 | openOrCloseAll(false) 55 | } 56 | 57 | const actions = results && results.length ? ( 58 |
59 | 60 | 61 |
62 | ) : "" 63 | 64 | const onLoadOtherRepos = () => Model.LoadOtherRepos(); 65 | 66 | const loadOtherRepos = reposPagination && reposPagination.OtherRepos > 1 ? ( 67 | 70 | ) : "" 71 | 72 | const reposRefs = {} 73 | const repos = results 74 | ? results.map((result, index) => { 75 | /* 76 | let state = useState(true) 77 | reposShowState[index] = state 78 | */ 79 | return ( 80 | reposRefs[index] = ref} 82 | matches={result.Matches} 83 | rev={result.Rev} 84 | repo={result.Repo} 85 | regexp={regexp} 86 | files={result.FilesWithMatch}/> 87 | ) 88 | }) : ''; 89 | 90 | return ( 91 |
92 |
93 |
Searching...
94 |
95 | { actions } 96 | { repos } 97 | { loadOtherRepos } 98 |
99 | ); 100 | }; 101 | -------------------------------------------------------------------------------- /ui/assets/js/components/HoundApp/SelectionTooltip.jsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from 'react'; 2 | import { SelectionManager } from '../../helpers/SelectionManager'; 3 | 4 | export const SelectionTooltip = (props) => { 5 | 6 | const { delay } = props; 7 | 8 | const supported = SelectionManager.Supported(); 9 | const [ active, setActive ] = useState(false); 10 | const [ data, setData ] = useState({ 11 | url: '', 12 | top: 0, 13 | left: 0, 14 | text: '' 15 | }); 16 | 17 | let timeoutDelay; 18 | 19 | useEffect(() => { 20 | 21 | if ( supported ) { 22 | 23 | document.addEventListener('click', () => { 24 | 25 | clearTimeout(delay); 26 | 27 | timeoutDelay = setTimeout(() => { 28 | 29 | const selection = SelectionManager.GetSelection(); 30 | 31 | if (selection) { 32 | setData(selection); 33 | setActive(true); 34 | } else { 35 | setActive(false); 36 | } 37 | 38 | }, delay); 39 | 40 | }); 41 | 42 | } 43 | 44 | }, []); 45 | 46 | const onClickTooltip = (e) => { 47 | e.stopPropagation(); 48 | setTimeout( () => { 49 | SelectionManager.clearSelection(); 50 | setActive(false); 51 | }, 100); 52 | }; 53 | 54 | const element = supported 55 | ? ( 56 | 64 | 65 | { data.text } 66 | 67 | 68 | ) 69 | : '' 70 | 71 | return element; 72 | 73 | }; 74 | -------------------------------------------------------------------------------- /ui/assets/js/excluded_files.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import { FilterableExcludedFiles } from './components/ExcludedFiles/FilterableExcludedFiles'; 4 | 5 | ReactDOM.render( 6 | , 7 | document.getElementById('root') 8 | ); 9 | -------------------------------------------------------------------------------- /ui/assets/js/helpers/Model.js: -------------------------------------------------------------------------------- 1 | import reqwest from 'reqwest'; 2 | import { Signal } from './Signal'; 3 | import { UrlToRepo } from './common'; 4 | import { ParamsFromUrl } from '../utils'; 5 | import { parsePatternLinks } from './PatternLinks'; 6 | 7 | /** 8 | * The data model for the UI is responsible for conducting searches and managing 9 | * all results. 
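* Typical flow: Load() fetches the repo list from api/v1/repos and raises
* didLoadRepos, Search() posts the query to api/v1/search and raises didSearch
* (or didError), and LoadMore()/LoadOtherRepos() page in further results.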
10 | */ 11 | export const Model = { 12 | // raised when a search begins 13 | willSearch: new Signal(), 14 | 15 | // raised when a search completes 16 | didSearch: new Signal(), 17 | 18 | willLoadMore: new Signal(), 19 | 20 | didLoadMore: new Signal(), 21 | 22 | willLoadOtherRepos: new Signal(), 23 | didLoadOtherRepos: new Signal(), 24 | 25 | didError: new Signal(), 26 | 27 | didLoadRepos : new Signal(), 28 | 29 | ValidRepos (repos) { 30 | const all = this.repos; 31 | const seen = {}; 32 | return repos.filter((repo) => { 33 | const valid = all[repo] && !seen[repo]; 34 | seen[repo] = true; 35 | return valid; 36 | }); 37 | }, 38 | 39 | RepoCount () { 40 | return Object.keys(this.repos).length; 41 | }, 42 | 43 | LoadConfig () { 44 | this.config = {}; 45 | if (typeof ClientConfigJson != 'undefined') { 46 | this.config = JSON.parse(ClientConfigJson); 47 | } 48 | }, 49 | 50 | Load () { 51 | 52 | const _this = this; 53 | 54 | const next = () => { 55 | const params = ParamsFromUrl(); 56 | this.didLoadRepos.raise(this, this.repos); 57 | if (params.q !== '') { 58 | this.Search(params); 59 | } 60 | }; 61 | 62 | reqwest({ 63 | url: 'api/v1/repos', 64 | type: 'json', 65 | success (data) { 66 | _this.repos = parsePatternLinks(data); 67 | next(); 68 | }, 69 | error (xhr, status, err) { 70 | // TODO(knorton): Fix these 71 | console.error(err); 72 | } 73 | }); 74 | }, 75 | 76 | processResults (matches, reset) { 77 | 78 | const results = []; 79 | 80 | for (const res of matches) { 81 | results.push({ 82 | Repo: res.Name, 83 | Rev: res.Revision, 84 | Matches: res.Matches, 85 | FilesWithMatch: res.FilesWithMatch, 86 | }); 87 | } 88 | // TODO: do we need to order by matches count? 89 | // results.sort((a, b) => b.Matches.length - a.Matches.length || a.Repo.localeCompare(b.Repo)); 90 | if (reset) { 91 | this.results = results; 92 | } else { 93 | this.results = this.results.concat(results); 94 | } 95 | 96 | const byRepo = this.results.reduce((obj, res) => (obj[res.Repo] = res, obj), {}); 97 | 98 | this.resultsByRepo = byRepo; 99 | 100 | }, 101 | 102 | Search (params) { 103 | 104 | const _this = this; 105 | const startedAt = Date.now(); 106 | 107 | this.willSearch.raise(this, params); 108 | 109 | params = { 110 | stats: 'fosho', 111 | repos: '*', 112 | rng: ':20', 113 | ...params 114 | }; 115 | 116 | if (params.repos === '') { 117 | params.repos = '*'; 118 | } 119 | 120 | this.params = params; 121 | 122 | // An empty query is basically useless, so rather than 123 | // sending it to the server and having the server do work 124 | // to produce an error, we simply return empty results 125 | // immediately in the client. 
126 | if (params.q === '') { 127 | this.results = []; 128 | this.resultsByRepo = {}; 129 | this.didSearch.raise(this, this.Results); 130 | return; 131 | } 132 | 133 | reqwest({ 134 | url: 'api/v1/search', 135 | data: params, 136 | type: 'json', 137 | success (data) { 138 | if (data.Error) { 139 | _this.didError.raise(_this, data.Error); 140 | return; 141 | } 142 | 143 | const stats = data.Stats; 144 | const reposPagination = data.ReposPagination; 145 | 146 | _this.processResults(data.Results, true); 147 | 148 | _this.stats = { 149 | Server: stats.Duration, 150 | Total: Date.now() - startedAt, 151 | Repos: stats.ReposScanned, 152 | Files: stats.FilesOpened 153 | }; 154 | _this.reposPagination = reposPagination; 155 | 156 | _this.didSearch.raise(_this, _this.results, _this.stats, _this.reposPagination); 157 | }, 158 | error (xhr, status, err) { 159 | _this.didError.raise(this, "The server broke down"); 160 | } 161 | }); 162 | }, 163 | 164 | LoadMore (repo) { 165 | const _this = this; 166 | const results = this.resultsByRepo[repo]; 167 | const numLoaded = results.Matches.length; 168 | const numNeeded = results.FilesWithMatch - numLoaded; 169 | const numToLoad = Math.min(2000, numNeeded); 170 | const endAt = numNeeded == numToLoad ? '' : '' + numToLoad; 171 | 172 | this.willLoadMore.raise(this, repo, numLoaded, numNeeded, numToLoad); 173 | 174 | const params = {...this.params, 175 | rng: numLoaded+':'+endAt, 176 | rngRepos: "0:1", 177 | repos: "^" + repo + "$" 178 | }; 179 | 180 | reqwest({ 181 | url: 'api/v1/search', 182 | data: params, 183 | type: 'json', 184 | success (data) { 185 | if (data.Error) { 186 | _this.didError.raise(_this, data.Error); 187 | return; 188 | } 189 | 190 | const result = data.Results[0]; 191 | results.Matches = results.Matches.concat(result.Matches); 192 | results.FilesWithMatch = result.FilesWithMatch; 193 | _this.didLoadMore.raise(_this, repo, _this.results); 194 | }, 195 | error (xhr, status, err) { 196 | _this.didError.raise(this, "The server broke down"); 197 | } 198 | }); 199 | }, 200 | 201 | LoadOtherRepos () { 202 | const _this = this; 203 | 204 | this.willLoadOtherRepos.raise(this); 205 | 206 | const params = {...this.params, 207 | rngRepos: this.reposPagination.NextOffset + ':' + this.reposPagination.NextLimit, 208 | }; 209 | 210 | reqwest({ 211 | url: 'api/v1/search', 212 | data: params, 213 | type: 'json', 214 | success (data) { 215 | if (data.Error) { 216 | _this.didError.raise(_this, data.Error); 217 | return; 218 | } 219 | 220 | _this.processResults(data.Results); 221 | _this.reposPagination = data.ReposPagination; 222 | _this.didLoadOtherRepos.raise(_this, _this.results, _this.reposPagination); 223 | }, 224 | error (xhr, status, err) { 225 | _this.didError.raise(this, "The server broke down"); 226 | } 227 | }); 228 | }, 229 | 230 | NameForRepo (repo) { 231 | return repo; 232 | // TODO: do we need this method? 
233 | 234 | const info = this.repos[repo]; 235 | if (!info) { 236 | return repo; 237 | } 238 | 239 | const url = info.url; 240 | const ax = url.lastIndexOf('/'); 241 | if (ax < 0) { 242 | return repo; 243 | } 244 | 245 | const name = url.substring(ax + 1).replace(/\.git$/, ''); 246 | 247 | const bx = url.lastIndexOf('/', ax - 1); 248 | 249 | if (bx < 0) { 250 | return name; 251 | } 252 | 253 | return url.substring(bx + 1, ax) + ' / ' + name; 254 | }, 255 | 256 | UrlToRepo (repo, path, line, rev) { 257 | return UrlToRepo(this.repos[repo], path, line, rev); 258 | } 259 | 260 | }; 261 | -------------------------------------------------------------------------------- /ui/assets/js/helpers/PatternLinks.js: -------------------------------------------------------------------------------- 1 | const getPatternLinkRegExp = (patternLinks) => { 2 | patternLinks.forEach((item) => { 3 | item.reg = new RegExp(item.pattern, 'g'); 4 | item.regcopy = new RegExp(item.pattern, 'g'); 5 | }); 6 | const regArray = patternLinks.map((item) => item.pattern); 7 | return new RegExp(`(?:${ regArray.join('|') })`, 'g'); 8 | }; 9 | 10 | export const parsePatternLinks = (data) => { 11 | for (let repo in data) { 12 | if (data[repo]['pattern-links']) { 13 | data[repo]['pattern-link-reg'] = getPatternLinkRegExp(data[repo]['pattern-links']); 14 | } 15 | } 16 | return data; 17 | }; 18 | -------------------------------------------------------------------------------- /ui/assets/js/helpers/SelectionManager.js: -------------------------------------------------------------------------------- 1 | import { closestElement } from '../utils'; 2 | 3 | export const SelectionManager = { 4 | 5 | Supported () { 6 | return ( 7 | window && 8 | 'getSelection' in window && 9 | document && 10 | document.body && 11 | 'getBoundingClientRect' in document.body 12 | ); 13 | }, 14 | 15 | GetSelection () { 16 | 17 | const selection = window.getSelection(); 18 | const anchorNode = selection.anchorNode; 19 | const selectionText = selection.toString().trim(); 20 | const newLineReg = /[\r\n]+/; 21 | const escapeReg = /[.?*+^$[\]\\(){}|-]/g; 22 | const urlReg = /([\?&])q=([^&$]+)/; 23 | 24 | if ( selectionText.length && !newLineReg.test(selectionText) && closestElement(anchorNode, 'lval') ) { 25 | 26 | const url = window.location.href; 27 | const escapedText = encodeURIComponent(selectionText.replace(escapeReg, '\\$&')); 28 | const searchURL = url.replace(urlReg, '$1q=' + escapedText); 29 | 30 | const selectionRange = selection.getRangeAt(0); 31 | const selectionRect = selectionRange.getBoundingClientRect(); 32 | const scrollTop = window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop || 0; 33 | 34 | return { 35 | text: selectionText, 36 | url: searchURL, 37 | left: selectionRect.left, 38 | top: selectionRect.top + scrollTop + 25, 39 | }; 40 | 41 | } 42 | 43 | return null; 44 | 45 | }, 46 | 47 | clearSelection () { 48 | const selection = window.getSelection(); 49 | selection.removeAllRanges(); 50 | } 51 | 52 | }; 53 | -------------------------------------------------------------------------------- /ui/assets/js/helpers/Signal.js: -------------------------------------------------------------------------------- 1 | export const Signal = function () { 2 | }; 3 | 4 | Signal.prototype = { 5 | listeners : [], 6 | 7 | tap (l) { 8 | // Make a copy of the listeners to avoid the all too common 9 | // subscribe-during-dispatch problem 10 | this.listeners = this.listeners.slice(0); 11 | this.listeners.push(l); 12 | }, 13 | 14 | untap (l) { 
15 | const ix = this.listeners.indexOf(l); 16 | if (ix == -1) { 17 | return; 18 | } 19 | 20 | // Make a copy of the listeners to avoid the all too common 21 | // unsubscribe-during-dispatch problem 22 | this.listeners = this.listeners.slice(0); 23 | this.listeners.splice(ix, 1); 24 | }, 25 | 26 | raise () { 27 | const args = Array.prototype.slice.call(arguments, 0); 28 | this.listeners.forEach((l) => { 29 | l.apply(this, args); 30 | }); 31 | } 32 | }; 33 | -------------------------------------------------------------------------------- /ui/assets/js/helpers/common.js: -------------------------------------------------------------------------------- 1 | export const ExpandVars = (template, values) => { 2 | for (let name in values) { 3 | template = template.replace('{' + name + '}', values[name]); 4 | } 5 | return template; 6 | }; 7 | 8 | export const UrlToRepo = (repo, path, line, rev) => { 9 | if (!repo){ 10 | // panic 11 | setTimeout(function(){ 12 | document.body.innerHTML = 'Something went wrong. Try to refresh page.
'; 13 | }, 100); 14 | throw("repo is undefined"); 15 | } 16 | let url = repo.url.replace(/\.git$/, ''); 17 | const pattern = repo['url-pattern']; 18 | const filename = path.substring(path.lastIndexOf('/') + 1); 19 | let anchor = line ? ExpandVars(pattern.anchor, { line, filename }) : ''; 20 | 21 | // Determine if the URL passed is a GitHub wiki 22 | const wikiUrl = /\.wiki$/.exec(url); 23 | 24 | if (wikiUrl) { 25 | url = url.replace(/\.wiki/, '/wiki') 26 | path = path.replace(/\.md$/, '') 27 | anchor = '' // wikis do not support direct line linking 28 | } 29 | 30 | // Hacky solution to fix _some more_ of the 404's when using SSH style URLs. 31 | // This works for both github style URLs (git@github.com:username/Foo.git) and 32 | // bitbucket style URLs (ssh://hg@bitbucket.org/username/Foo). 33 | 34 | // Regex explained: Match either `git` or `hg` followed by an `@`. 35 | // Next, slurp up the hostname by reading until either a `:` or `/` is found. 36 | // Finally, grab all remaining characters. 37 | const sshParts = /(git|hg)@(.*?)(:|\/)(.*)/.exec(url); 38 | 39 | if (sshParts) { 40 | url = '//' + sshParts[2] + '/' + sshParts[4]; 41 | } 42 | 43 | // I'm sure there is a nicer React/jsx way to do this: 44 | return ExpandVars(pattern['base-url'], { 45 | url, 46 | path, 47 | rev, 48 | anchor 49 | }); 50 | }; 51 | -------------------------------------------------------------------------------- /ui/assets/js/hound.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import { App } from './components/HoundApp/App'; 4 | import { Model } from './helpers/Model'; 5 | 6 | Model.LoadConfig(); 7 | 8 | ReactDOM.render( 9 | , 10 | document.getElementById('root') 11 | ); 12 | 13 | Model.Load(); 14 | -------------------------------------------------------------------------------- /ui/assets/js/utils/index.js: -------------------------------------------------------------------------------- 1 | export const FormatNumber = (t) => { 2 | let s = '' + (t|0); 3 | let b = []; 4 | while (s.length > 0) { 5 | b.unshift(s.substring(s.length - 3, s.length)); 6 | s = s.substring(0, s.length - 3); 7 | } 8 | return b.join(','); 9 | }; 10 | 11 | export const ParamsFromQueryString = (qs, params = {}) => { 12 | 13 | if (!qs) { 14 | return params; 15 | } 16 | 17 | qs.substring(1).split('&').forEach((v) => { 18 | const pair = v.split('='); 19 | if (pair.length != 2) { 20 | return; 21 | } 22 | 23 | // Handle classic '+' representation of spaces, such as is used 24 | // when Hound is set up in Chrome's Search Engine Manager settings 25 | pair[1] = pair[1].replace(/\+/g, ' '); 26 | 27 | params[decodeURIComponent(pair[0])] = decodeURIComponent(pair[1]); 28 | }); 29 | 30 | return params; 31 | }; 32 | 33 | export const ParamsFromUrl = ( 34 | params = { 35 | q: '', 36 | i: 'nope', 37 | files: '', 38 | excludeFiles: '', 39 | repos: '*' 40 | }) => ParamsFromQueryString(location.search, params); 41 | 42 | export const ParamValueToBool = (v) => { 43 | v = v.toLowerCase(); 44 | return v === 'fosho' || v === 'true' || v === '1'; 45 | }; 46 | 47 | /** 48 | * Take a list of matches and turn it into a simple list of lines. 
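 *
 * For example (illustrative values), a match on line 12 with one line of
 * Before context and one line of After context becomes:
 *
 *   [ { Number: 11, Content: '...', Match: false },
 *     { Number: 12, Content: '...', Match: true },
 *     { Number: 13, Content: '...', Match: false } ]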
49 | */ 50 | export const MatchToLines = (match) => { 51 | const lines = []; 52 | const base = match.LineNumber; 53 | const nBefore = match.Before.length; 54 | 55 | match.Before.forEach((line, index) => { 56 | lines.push({ 57 | Number : base - nBefore + index, 58 | Content: line, 59 | Match: false 60 | }); 61 | }); 62 | 63 | lines.push({ 64 | Number: base, 65 | Content: match.Line, 66 | Match: true 67 | }); 68 | 69 | match.After.forEach((line, index) => { 70 | lines.push({ 71 | Number: base + index + 1, 72 | Content: line, 73 | Match: false 74 | }); 75 | }); 76 | 77 | return lines; 78 | }; 79 | 80 | /** 81 | * Take several lists of lines each representing a matching block and merge overlapping 82 | * blocks together. A good example of this is when you have a match on two consecutive 83 | * lines. We will merge those into a singular block. 84 | * 85 | * TODO(knorton): This code is a bit skanky. I wrote it while sleepy. It can surely be 86 | * made simpler. 87 | */ 88 | export const CoalesceMatches = (matches) => { 89 | const blocks = matches.map(MatchToLines); 90 | const res = []; 91 | let current; 92 | // go through each block of lines and see if it overlaps 93 | // with the previous. 94 | for (let i = 0, n = blocks.length; i < n; i++) { 95 | const block = blocks[i]; 96 | const max = current ? current[current.length - 1].Number : -1; 97 | // if the first line in the block is before the last line in 98 | // current, we'll be merging. 99 | if (block[0].Number <= max) { 100 | block.forEach((line) => { 101 | if (line.Number > max) { 102 | current.push(line); 103 | } else if (current && line.Match) { 104 | // we have to go back into current and make sure that matches 105 | // are properly marked. 106 | current[current.length - 1 - (max - line.Number)].Match = true; 107 | } 108 | }); 109 | } else { 110 | if (current) { 111 | res.push(current); 112 | } 113 | current = block; 114 | } 115 | } 116 | 117 | if (current) { 118 | res.push(current); 119 | } 120 | 121 | return res; 122 | }; 123 | 124 | /** 125 | * Use the DOM to safely htmlify some text. 126 | */ 127 | export const EscapeHtml = ((div) => { 128 | return ( 129 | (text) => { 130 | div.textContent = text; 131 | return div.innerHTML; 132 | } 133 | ); 134 | })( document.createElement('div') ); 135 | 136 | /** 137 | * Produce html for a line using the regexp to highlight matches and the regexp to replace pattern-links. 138 | */ 139 | export const ContentFor = (repo, line, regexp) => { 140 | 141 | const startEm = ''; 142 | const endEm = ''; 143 | const indexes = []; 144 | 145 | // Store the search matches 146 | if (line.Match) { 147 | let matches; 148 | let len = 0; 149 | while ( (matches = regexp.exec(line.Content)) !== null ) { 150 | if (!len) { len = matches[0].length; } 151 | if (!len) { 152 | // Empty match! 
153 | // Example of such regexp: "|" 154 | // Make a workaround for infinite loop 155 | // Reference: https://stackoverflow.com/questions/33015942/regex-exec-loop-never-terminates-in-js 156 | regexp.lastIndex++; 157 | } 158 | indexes.push( 159 | { index: matches.index, element: startEm }, 160 | { index: matches.index + len, element: endEm } 161 | ); 162 | } 163 | regexp.lastIndex = 0; 164 | } 165 | 166 | // Store links matches 167 | if ( repo['pattern-link-reg'] ) { 168 | 169 | const matches = line.Content.match(repo['pattern-link-reg']); 170 | 171 | if (matches) { 172 | 173 | const numberOfMatches = matches.length; 174 | let matchesProcessed = 0; 175 | 176 | // Iterate over all the pattern replacement items 177 | repo['pattern-links'].some((item) => { 178 | if ( item.reg.test(line.Content) ) { 179 | item.reg.lastIndex = 0; 180 | let matches; 181 | while ( (matches = item.reg.exec(line.Content)) !== null ) { 182 | indexes.push( 183 | { 184 | index: matches.index, 185 | element: `` 186 | }, 187 | { 188 | index: matches.index + matches[0].length, 189 | element: '' 190 | } 191 | ); 192 | } 193 | // Exit the loop when all matches have been processed 194 | matchesProcessed++; 195 | if (matchesProcessed === numberOfMatches) { 196 | return true; 197 | } 198 | } 199 | item.reg.lastIndex = 0; 200 | }); 201 | 202 | } 203 | } 204 | 205 | if (indexes.length) { 206 | 207 | // Order the array 208 | indexes.sort((a, b) => a.index - b.index); 209 | 210 | let formatting = false; 211 | const totalIndexes = indexes.length - 1; 212 | 213 | return indexes.reduce((content, item, index, array) => { 214 | 215 | content += EscapeHtml(line.Content.slice(index ? array[index - 1].index : 0, item.index)); 216 | 217 | if (item.element !== startEm && item.element !== endEm && formatting) { 218 | content += `${endEm}${item.element}${startEm}`; 219 | } else { 220 | content += item.element; 221 | } 222 | 223 | if (index === totalIndexes) { 224 | content += EscapeHtml(line.Content.slice(array[index].index)); 225 | } 226 | 227 | if (item.element === startEm) { formatting = true; } 228 | if (item.element === endEm) { formatting = false; } 229 | 230 | return content; 231 | 232 | }, ''); 233 | 234 | } 235 | 236 | return EscapeHtml(line.Content); 237 | }; 238 | 239 | /** 240 | * Return the closest parent element 241 | * @param element 242 | * @param className 243 | */ 244 | export const closestElement = (element, className) => { 245 | while (element.className !== className) { 246 | element = element.parentNode; 247 | if (!element) { 248 | return null; 249 | } 250 | } 251 | return element; 252 | }; 253 | -------------------------------------------------------------------------------- /ui/assets/open_search.tpl.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Hound 5 | Search code with Hound 6 | Hound 7 | 10 | 11 | -------------------------------------------------------------------------------- /ui/content.go: -------------------------------------------------------------------------------- 1 | package ui 2 | 3 | import "io" 4 | 5 | var contents map[string]*content 6 | 7 | // This interface abstracts the Execute method on template which is 8 | // structurally similar in both html/template and text/template. 9 | // We need to use an interface instead of a direct template field 10 | // because then we will need two different fields for html template 11 | // and text template. 
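//
// Both *html/template.Template and *text/template.Template satisfy this
// interface, since each exposes Execute(io.Writer, interface{}) error. A
// minimal illustration (not part of the real flow; assumes the aliases used
// in ui.go):
//
//	var r renderer = html_template.Must(html_template.New("t").Parse("{{.Title}}"))
//	err := r.Execute(os.Stdout, map[string]string{"Title": "Hound"})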
12 | type renderer interface { 13 | Execute(w io.Writer, data interface{}) error 14 | } 15 | 16 | type content struct { 17 | 18 | // The uri for accessing this asset 19 | uri string 20 | 21 | // The filename of the template relative to the asset directory 22 | template string 23 | 24 | // The JavaScript sources used in this HTML page 25 | sources []string 26 | 27 | // The parsed template - can be of html/template or text/template type 28 | tpl renderer 29 | 30 | // This is used to determine if a template is to be parsed as text or html 31 | tplType string 32 | } 33 | 34 | func init() { 35 | // The following are HTML assets that are rendered via 36 | // template. 37 | contents = map[string]*content{ 38 | 39 | "/": &content{ 40 | template: "index.tpl.html", 41 | sources: []string{ 42 | "js/hound.js", 43 | }, 44 | tplType: "html", 45 | }, 46 | 47 | "/open_search.xml": &content{ 48 | template: "open_search.tpl.xml", 49 | tplType: "xml", 50 | }, 51 | 52 | "/excluded_files.html": &content{ 53 | template: "excluded_files.tpl.html", 54 | sources: []string{ 55 | "js/excluded_files.js", 56 | }, 57 | tplType: "html", 58 | }, 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /ui/ui.go: -------------------------------------------------------------------------------- 1 | package ui 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "fmt" 7 | html_template "html/template" 8 | "io" 9 | "log" 10 | "math/rand" 11 | "net/http" 12 | "path/filepath" 13 | "runtime" 14 | "strings" 15 | text_template "text/template" 16 | "time" 17 | 18 | "github.com/itpp-labs/hound/config" 19 | ) 20 | 21 | // An http.Handler for the dev-mode case. 22 | type devHandler struct { 23 | // A simple file server for serving non-template assets 24 | http.Handler 25 | 26 | // the collection of templated assets 27 | content map[string]*content 28 | 29 | // the root asset dir 30 | root string 31 | 32 | // the config we are running on 33 | cfg *config.Config 34 | } 35 | 36 | // An http.Handler for the prd-mode case. 37 | type prdHandler struct { 38 | // The collection of templated assets w/ their templates pre-parsed 39 | content map[string]*content 40 | 41 | // The config object as a json string 42 | cfgJson string 43 | 44 | // the config we are running on 45 | cfg *config.Config 46 | 47 | // OpenSearch args as string 48 | initSearch string 49 | } 50 | 51 | func (h *devHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { 52 | p := r.URL.Path 53 | 54 | // See if we have templated content for this path 55 | cr := h.content[p] 56 | if cr == nil { 57 | if serveFavicon(p, h.cfg.Favicon, w, r) { 58 | return 59 | } 60 | // if not, serve up files 61 | h.Handler.ServeHTTP(w, r) 62 | return 63 | } 64 | 65 | // If so, render the HTML 66 | w.Header().Set("Content-Type", "text/html;charset=utf-8") 67 | if err := renderForDev(w, h.root, cr, h.cfg, r); err != nil { 68 | log.Panic(err) 69 | } 70 | } 71 | 72 | func serveFavicon(path string, fav *config.Favicon, w http.ResponseWriter, r *http.Request) bool { 73 | if path == "/favicon.ico" && fav != nil { 74 | http.ServeContent(w, r, "favicon.ico", fav.ModTime, bytes.NewReader(fav.Image)) 75 | return true 76 | } 77 | return false 78 | } 79 | 80 | // Renders a templated asset in dev-mode. This simply embeds external script tags 81 | // for the source elements. 
82 | func renderForDev(w io.Writer, root string, c *content, cfg *config.Config, r *http.Request) error { 83 | var err error 84 | // For more context, see: https://github.com/etsy/hound/issues/239 85 | switch c.tplType { 86 | case "html": 87 | // Use html/template to parse the html template 88 | c.tpl, err = html_template.ParseFiles(filepath.Join(root, c.template)) 89 | if err != nil { 90 | return err 91 | } 92 | case "xml", "text": 93 | // Use text/template to parse the xml or text templates 94 | // We are using text/template here for parsing xml to keep things 95 | // consistent with html/template parsing. 96 | c.tpl, err = text_template.ParseFiles(filepath.Join(root, c.template)) 97 | if err != nil { 98 | return err 99 | } 100 | default: 101 | return errors.New("invalid tplType for content") 102 | } 103 | 104 | json, err := cfg.ToJsonString() 105 | if err != nil { 106 | return err 107 | } 108 | 109 | initSearch, err := cfg.ToOpenSearchParams() 110 | if err != nil { 111 | return err 112 | } 113 | initSearch = strings.Replace(initSearch, "&", "&", -1) 114 | 115 | var buf bytes.Buffer 116 | for _, path := range c.sources { 117 | // TODO: Use port from webpack.config.js -> devServer.port 118 | fmt.Fprintf(&buf, "", path) 119 | } 120 | 121 | return c.tpl.Execute(w, map[string]interface{}{ 122 | "ClientConfigJson": json, 123 | "Title": cfg.Title, 124 | "Source": html_template.HTML(buf.String()), 125 | "Host": r.Host, 126 | "InitSearch": initSearch, 127 | }) 128 | } 129 | 130 | // Serve an asset over HTTP. This ensures we get proper support for range 131 | // requests and if-modified-since checks. 132 | func serveAsset(w http.ResponseWriter, r *http.Request, name string) { 133 | n, err := AssetInfo(name) 134 | if err != nil { 135 | http.NotFound(w, r) 136 | return 137 | } 138 | 139 | a, err := Asset(name) 140 | if err != nil { 141 | http.NotFound(w, r) 142 | return 143 | } 144 | 145 | http.ServeContent(w, r, n.Name(), n.ModTime(), bytes.NewReader(a)) 146 | } 147 | 148 | func (h *prdHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { 149 | p := r.URL.Path 150 | 151 | // see if we have a templated asset for this path 152 | ct := h.content[p] 153 | if ct != nil { 154 | // if so, render it 155 | if err := renderForPrd(w, ct, h.cfg, h.cfgJson, h.initSearch, r); err != nil { 156 | log.Panic(err) 157 | } 158 | return 159 | } 160 | if serveFavicon(p, h.cfg.Favicon, w, r) { 161 | return 162 | } 163 | 164 | // otherwise, we need to find the asset in the bundled asset 165 | // data. Assets are relative to the asset directory, so we need 166 | // to remove the leading '/' in the path. 167 | serveAsset(w, r, p[1:]) 168 | } 169 | 170 | // Renders a templated asset in prd-mode. This strategy will embed 171 | // the sources directly in a script tag on the templated page. 
172 | func renderForPrd(w io.Writer, c *content, cfg *config.Config, cfgJson string, initSearch string, r *http.Request) error { 173 | var buf bytes.Buffer 174 | buf.WriteString("") 183 | 184 | var randomAd string // Initialize randomAd with an empty string 185 | 186 | if cfg.Ads != nil && len(cfg.Ads) > 0 { 187 | // Seed the random number generator (typically done once) 188 | rand.Seed(time.Now().UnixNano()) 189 | 190 | // Generate a random index within the bounds of the Ads slice 191 | randomIndex := rand.Intn(len(cfg.Ads)) 192 | 193 | // Access the random ad string 194 | randomAd = *cfg.Ads[randomIndex] 195 | } 196 | 197 | return c.tpl.Execute(w, map[string]interface{}{ 198 | "ClientConfigJson": cfgJson, 199 | "Title": cfg.Title, 200 | "Source": html_template.HTML(buf.String()), 201 | "Host": r.Host, 202 | "InitSearch": initSearch, 203 | "Promo": html_template.HTML(randomAd), 204 | }) 205 | } 206 | 207 | // Used for dev-mode only. Determime the asset directory where 208 | // we can find all our web files for direct serving. 209 | func assetDir() string { 210 | _, file, _, _ := runtime.Caller(0) 211 | dir, err := filepath.Abs( 212 | filepath.Join(filepath.Dir(file), "assets")) 213 | if err != nil { 214 | log.Panic(err) 215 | } 216 | return dir 217 | } 218 | 219 | // Create an http.Handler for dev-mode. 220 | func newDevHandler(cfg *config.Config) (http.Handler, error) { 221 | root := assetDir() 222 | return &devHandler{ 223 | Handler: http.FileServer(http.Dir(root)), 224 | content: contents, 225 | root: root, 226 | cfg: cfg, 227 | }, nil 228 | } 229 | 230 | // Create an http.Handler for prd-mode. 231 | func newPrdHandler(cfg *config.Config) (http.Handler, error) { 232 | for _, cnt := range contents { 233 | a, err := Asset(cnt.template) 234 | if err != nil { 235 | return nil, err 236 | } 237 | 238 | // For more context, see: https://github.com/etsy/hound/issues/239 239 | switch cnt.tplType { 240 | case "html": 241 | // Use html/template to parse the html template 242 | cnt.tpl, err = html_template.New(cnt.template).Parse(string(a)) 243 | if err != nil { 244 | return nil, err 245 | } 246 | case "xml", "text": 247 | // Use text/template to parse the xml or text templates 248 | // We are using text/template here for parsing xml to keep things 249 | // consistent with html/template parsing. 250 | cnt.tpl, err = text_template.New(cnt.template).Parse(string(a)) 251 | if err != nil { 252 | return nil, err 253 | } 254 | default: 255 | return nil, errors.New("invalid tplType for content") 256 | } 257 | } 258 | 259 | json, err := cfg.ToJsonString() 260 | if err != nil { 261 | return nil, err 262 | } 263 | initSearch, err := cfg.ToOpenSearchParams() 264 | if err != nil { 265 | return nil, err 266 | } 267 | initSearch = strings.Replace(initSearch, "&", "&", -1) 268 | 269 | return &prdHandler{ 270 | content: contents, 271 | cfg: cfg, 272 | cfgJson: json, 273 | initSearch: initSearch, 274 | }, nil 275 | } 276 | 277 | // Create an http.Handler for serving the web assets. If dev is true, 278 | // the http.Handler that is returned will serve assets directly our of 279 | // the source directories making rapid web development possible. If dev 280 | // is false, the http.Handler will serve assets out of data embedded 281 | // in the executable. 
282 | func Content(dev bool, cfg *config.Config) (http.Handler, error) { 283 | if dev { 284 | return newDevHandler(cfg) 285 | } 286 | 287 | return newPrdHandler(cfg) 288 | } 289 | -------------------------------------------------------------------------------- /vcs/bzr.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "bytes" 5 | "io" 6 | "log" 7 | "os/exec" 8 | "path/filepath" 9 | "strings" 10 | ) 11 | 12 | func init() { 13 | Register(newBzr, "bzr") 14 | } 15 | 16 | func newBzr(b []byte) (Driver, error) { 17 | return &BzrDriver{}, nil 18 | } 19 | 20 | type BzrDriver struct{} 21 | 22 | func (g *BzrDriver) HeadRev(dir string) (string, error) { 23 | cmd := exec.Command( 24 | "bzr", 25 | "revno") 26 | cmd.Dir = dir 27 | r, err := cmd.StdoutPipe() 28 | if err != nil { 29 | return "", err 30 | } 31 | defer r.Close() 32 | 33 | if err := cmd.Start(); err != nil { 34 | return "", err 35 | } 36 | 37 | var buf bytes.Buffer 38 | 39 | if _, err := io.Copy(&buf, r); err != nil { 40 | return "", err 41 | } 42 | 43 | return strings.TrimSpace(buf.String()), cmd.Wait() 44 | } 45 | 46 | func (g *BzrDriver) Pull(dir string) (string, error) { 47 | cmd := exec.Command("bzr", "pull") 48 | cmd.Dir = dir 49 | out, err := cmd.CombinedOutput() 50 | if err != nil { 51 | log.Printf("Failed to bzr pull %s, see output below\n%sContinuing...", dir, out) 52 | return "", err 53 | } 54 | 55 | return g.HeadRev(dir) 56 | } 57 | 58 | func (g *BzrDriver) Clone(dir, url string) (string, error) { 59 | par, rep := filepath.Split(dir) 60 | cmd := exec.Command( 61 | "bzr", 62 | "branch", 63 | url, 64 | rep) 65 | cmd.Dir = par 66 | out, err := cmd.CombinedOutput() 67 | if err != nil { 68 | log.Printf("Failed to clone %s, see output below\n%sContinuing...", url, out) 69 | return "", err 70 | } 71 | 72 | return g.HeadRev(dir) 73 | } 74 | 75 | func (g *BzrDriver) SpecialFiles() []string { 76 | return []string{ 77 | ".bzr", 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /vcs/git.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "fmt" 7 | "io" 8 | "log" 9 | "os/exec" 10 | "path/filepath" 11 | "strings" 12 | ) 13 | 14 | const defaultRef = "master" 15 | 16 | func init() { 17 | Register(newGit, "git") 18 | } 19 | 20 | type GitDriver struct { 21 | Ref string `json:"ref"` 22 | } 23 | 24 | func newGit(b []byte) (Driver, error) { 25 | d := &GitDriver{ 26 | Ref: defaultRef, 27 | } 28 | 29 | if b == nil { 30 | return d, nil 31 | } 32 | 33 | if e := json.Unmarshal(b, d); e != nil { 34 | return nil, e 35 | } 36 | return d, nil 37 | } 38 | 39 | func (g *GitDriver) HeadRev(dir string) (string, error) { 40 | cmd := exec.Command( 41 | "git", 42 | "rev-parse", 43 | "HEAD") 44 | cmd.Dir = dir 45 | r, err := cmd.StdoutPipe() 46 | if err != nil { 47 | return "", err 48 | } 49 | defer r.Close() 50 | 51 | if err := cmd.Start(); err != nil { 52 | return "", err 53 | } 54 | 55 | var buf bytes.Buffer 56 | 57 | if _, err := io.Copy(&buf, r); err != nil { 58 | return "", err 59 | } 60 | 61 | return strings.TrimSpace(buf.String()), cmd.Wait() 62 | } 63 | 64 | func run(desc, dir, cmd string, args ...string) error { 65 | c := exec.Command(cmd, args...) 
66 | c.Dir = dir 67 | if out, err := c.CombinedOutput(); err != nil { 68 | log.Printf( 69 | "Failed to %s %s, see output below\n%sContinuing...", 70 | desc, 71 | dir, 72 | out) 73 | return err 74 | } 75 | return nil 76 | } 77 | 78 | func (g *GitDriver) Pull(dir string) (string, error) { 79 | if err := run("git fetch", dir, 80 | "git", 81 | "fetch", 82 | "--prune", 83 | "--no-tags", 84 | "--depth", "1", 85 | "origin", 86 | fmt.Sprintf("+%s:remotes/origin/%s", g.Ref, g.Ref)); err != nil { 87 | return "", err 88 | } 89 | 90 | if err := run("git reset", dir, 91 | "git", 92 | "reset", 93 | "--hard", 94 | fmt.Sprintf("origin/%s", g.Ref)); err != nil { 95 | return "", err 96 | } 97 | 98 | return g.HeadRev(dir) 99 | } 100 | 101 | func (g *GitDriver) Clone(dir, url string) (string, error) { 102 | par, rep := filepath.Split(dir) 103 | cmd := exec.Command( 104 | "git", 105 | "clone", 106 | "--depth", "1", 107 | "--branch", g.Ref, 108 | url, 109 | rep) 110 | cmd.Dir = par 111 | out, err := cmd.CombinedOutput() 112 | if err != nil { 113 | log.Printf("Failed to clone %s, see output below\n%sContinuing...", url, out) 114 | return "", err 115 | } 116 | 117 | return g.HeadRev(dir) 118 | } 119 | 120 | func (g *GitDriver) SpecialFiles() []string { 121 | return []string{ 122 | ".git", 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /vcs/git_test.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import "testing" 4 | 5 | func TestGitConfigWithCustomRef(t *testing.T) { 6 | cfg := `{"ref": "custom"}` 7 | d, err := New("git", []byte(cfg)) 8 | if err != nil { 9 | t.Fatal(err) 10 | } 11 | git := d.Driver.(*GitDriver) 12 | if git.Ref != "custom" { 13 | t.Fatalf("expected branch of \"custom\", got %s", git.Ref) 14 | } 15 | } 16 | 17 | func TestGitConfigWithoutRef(t *testing.T) { 18 | cfg := `{"option": "option"}` 19 | d, err := New("git", []byte(cfg)) 20 | if err != nil { 21 | t.Fatal(err) 22 | } 23 | git := d.Driver.(*GitDriver) 24 | if git.Ref != "master" { 25 | t.Fatalf("expected branch of \"master\", got %s", git.Ref) 26 | } 27 | } 28 | 29 | func TestGitConfigWithoutAdditionalConfig(t *testing.T) { 30 | d, err := New("git", nil) 31 | if err != nil { 32 | t.Fatal(err) 33 | } 34 | git := d.Driver.(*GitDriver) 35 | if git.Ref != "master" { 36 | t.Fatalf("expected branch of \"master\", got %s", git.Ref) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /vcs/hg.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "bytes" 5 | "io" 6 | "io/ioutil" 7 | "os/exec" 8 | "path/filepath" 9 | "strings" 10 | ) 11 | 12 | func init() { 13 | Register(newHg, "hg", "mercurial") 14 | } 15 | 16 | type MercurialDriver struct{} 17 | 18 | func newHg(b []byte) (Driver, error) { 19 | return &MercurialDriver{}, nil 20 | } 21 | 22 | func (g *MercurialDriver) HeadRev(dir string) (string, error) { 23 | cmd := exec.Command( 24 | "hg", 25 | "log", 26 | "-r", 27 | ".", 28 | "--template", 29 | "{node}") 30 | cmd.Dir = dir 31 | r, err := cmd.StdoutPipe() 32 | if err != nil { 33 | return "", err 34 | } 35 | defer r.Close() 36 | 37 | if err := cmd.Start(); err != nil { 38 | return "", err 39 | } 40 | 41 | var buf bytes.Buffer 42 | 43 | if _, err := io.Copy(&buf, r); err != nil { 44 | return "", err 45 | } 46 | 47 | return strings.TrimSpace(buf.String()), cmd.Wait() 48 | } 49 | 50 | func (g *MercurialDriver) Pull(dir string) 
(string, error) { 51 | cmd := exec.Command("hg", "pull", "-u") 52 | cmd.Dir = dir 53 | err := cmd.Run() 54 | if err != nil { 55 | return "", err 56 | } 57 | 58 | return g.HeadRev(dir) 59 | } 60 | 61 | func (g *MercurialDriver) Clone(dir, url string) (string, error) { 62 | par, rep := filepath.Split(dir) 63 | cmd := exec.Command( 64 | "hg", 65 | "clone", 66 | url, 67 | rep) 68 | cmd.Dir = par 69 | cmd.Stdout = ioutil.Discard 70 | if err := cmd.Run(); err != nil { 71 | return "", err 72 | } 73 | 74 | return g.HeadRev(dir) 75 | } 76 | 77 | func (g *MercurialDriver) SpecialFiles() []string { 78 | return []string{ 79 | ".hg", 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /vcs/svn.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "io" 7 | "log" 8 | "os/exec" 9 | "path/filepath" 10 | "strings" 11 | ) 12 | 13 | func init() { 14 | Register(newSvn, "svn", "subversion") 15 | } 16 | 17 | type SVNDriver struct { 18 | Username string `json:"username"` 19 | Password string `json:"password"` 20 | } 21 | 22 | func newSvn(b []byte) (Driver, error) { 23 | var d SVNDriver 24 | 25 | if b != nil { 26 | if err := json.Unmarshal(b, &d); err != nil { 27 | return nil, err 28 | } 29 | } 30 | 31 | return &d, nil 32 | } 33 | 34 | func (g *SVNDriver) HeadRev(dir string) (string, error) { 35 | cmd := exec.Command( 36 | "svnversion") 37 | cmd.Dir = dir 38 | r, err := cmd.StdoutPipe() 39 | if err != nil { 40 | return "", err 41 | } 42 | defer r.Close() 43 | 44 | if err := cmd.Start(); err != nil { 45 | return "", err 46 | } 47 | 48 | var buf bytes.Buffer 49 | 50 | if _, err := io.Copy(&buf, r); err != nil { 51 | return "", err 52 | } 53 | 54 | return strings.TrimSpace(buf.String()), cmd.Wait() 55 | } 56 | 57 | func (g *SVNDriver) Pull(dir string) (string, error) { 58 | cmd := exec.Command( 59 | "svn", 60 | "update", 61 | "--ignore-externals", 62 | "--username", 63 | g.Username, 64 | "--password", 65 | g.Password) 66 | cmd.Dir = dir 67 | out, err := cmd.CombinedOutput() 68 | if err != nil { 69 | log.Printf("Failed to SVN update %s, see output below\n%sContinuing...", dir, out) 70 | return "", err 71 | } 72 | 73 | return g.HeadRev(dir) 74 | } 75 | 76 | func (g *SVNDriver) Clone(dir, url string) (string, error) { 77 | par, rep := filepath.Split(dir) 78 | cmd := exec.Command( 79 | "svn", 80 | "checkout", 81 | "--ignore-externals", 82 | "--username", 83 | g.Username, 84 | "--password", 85 | g.Password, 86 | url, 87 | rep) 88 | cmd.Dir = par 89 | out, err := cmd.CombinedOutput() 90 | if err != nil { 91 | log.Printf("Failed to checkout %s, see output below\n%sContinuing...", url, out) 92 | return "", err 93 | } 94 | 95 | return g.HeadRev(dir) 96 | } 97 | 98 | func (g *SVNDriver) SpecialFiles() []string { 99 | return []string{ 100 | ".svn", 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /vcs/svn_test.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | // Tests that the svn driver is able to parse its config. 
8 | func TestSvnConfig(t *testing.T) { 9 | cfg := `{"username" : "svn_username", "password" : "svn_password"}` 10 | 11 | d, err := New("svn", []byte(cfg)) 12 | if err != nil { 13 | t.Fatal(err) 14 | } 15 | 16 | svn := d.Driver.(*SVNDriver) 17 | if svn.Username != "svn_username" { 18 | t.Fatalf("expected username of \"svn_username\", got %s", svn.Username) 19 | } 20 | 21 | if svn.Password != "svn_password" { 22 | t.Fatalf("expected password of \"svn_password\", got %s", svn.Password) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /vcs/vcs.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | "os" 7 | ) 8 | 9 | // A collection that maps vcs names to their underlying 10 | // factory. A factory allows the vcs to have unserialized 11 | // json config passed in to be parsed. 12 | var drivers = make(map[string]func(c []byte) (Driver, error)) 13 | 14 | // A "plugin" for each vcs that supports the very limited set of vcs 15 | // operations that hound needs. 16 | type Driver interface { 17 | 18 | // Clone a new working directory. 19 | Clone(dir, url string) (string, error) 20 | 21 | // Pull new changes from the server and update the working directory. 22 | Pull(dir string) (string, error) 23 | 24 | // Return the revision at the head of the vcs directory. 25 | HeadRev(dir string) (string, error) 26 | 27 | // Return a list of special filenames that should not be indexed. 28 | SpecialFiles() []string 29 | } 30 | 31 | // An API to interact with a vcs working directory. This is 32 | // what clients will interact with. 33 | type WorkDir struct { 34 | Driver 35 | } 36 | 37 | // Register a new vcs driver under 1 or more names. 38 | func Register(fn func(c []byte) (Driver, error), names ...string) { 39 | if fn == nil { 40 | log.Panic("vcs: cannot register nil factory") 41 | } 42 | 43 | for _, name := range names { 44 | drivers[name] = fn 45 | } 46 | } 47 | 48 | // Create a new WorkDir from the name and configuration data. 49 | func New(name string, cfg []byte) (*WorkDir, error) { 50 | f := drivers[name] 51 | if f == nil { 52 | return nil, fmt.Errorf("vcs: %s is not a valid vcs driver.", name) 53 | } 54 | 55 | d, err := f(cfg) 56 | if err != nil { 57 | return nil, err 58 | } 59 | 60 | return &WorkDir{d}, nil 61 | } 62 | 63 | func exists(path string) bool { 64 | if _, err := os.Stat(path); err != nil { 65 | return false 66 | } 67 | return true 68 | } 69 | 70 | // A utility method that carries out the common operation of cloning 71 | // if the working directory is absent and pulling otherwise. 
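//
// A rough sketch of typical use by the indexer (illustrative only; the actual
// call sites are in the searcher package, and the paths/URLs here are made up):
//
//	wd, _ := vcs.New("git", []byte(`{"ref": "main"}`))
//	rev, err := wd.PullOrClone("/data/hound/repo", "https://github.com/itpp-labs/hound.git")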
72 | func (w *WorkDir) PullOrClone(dir, url string) (string, error) { 73 | if exists(dir) { 74 | return w.Pull(dir) 75 | } 76 | return w.Clone(dir, url) 77 | } 78 | -------------------------------------------------------------------------------- /vcs/vcs_test.go: -------------------------------------------------------------------------------- 1 | package vcs 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | // TODO(knorton): Write tests for the vcs interactions 8 | 9 | // Just make sure all drivers are tolerant of nil 10 | func TestNilConfigs(t *testing.T) { 11 | for name, _ := range drivers { 12 | d, err := New(name, nil) 13 | if err != nil { 14 | t.Fatal(err) 15 | } 16 | 17 | if d == nil { 18 | t.Fatalf("vcs: %s returned a nil driver", name) 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /web/web.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "sync" 7 | 8 | "github.com/itpp-labs/hound/api" 9 | "github.com/itpp-labs/hound/config" 10 | "github.com/itpp-labs/hound/searcher" 11 | "github.com/itpp-labs/hound/ui" 12 | ) 13 | 14 | // Server is an HTTP server that handles all 15 | // http traffic for hound. It is able to serve 16 | // some traffic before indexes are built and 17 | // then transition to all traffic afterwards. 18 | type Server struct { 19 | cfg *config.Config 20 | dev bool 21 | ch chan error 22 | 23 | mux *http.ServeMux 24 | lck sync.RWMutex 25 | } 26 | 27 | func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { 28 | if r.URL.Path == s.cfg.HealthCheckURI { 29 | fmt.Fprintln(w, "👍") 30 | return 31 | } 32 | 33 | s.lck.RLock() 34 | defer s.lck.RUnlock() 35 | if m := s.mux; m != nil { 36 | m.ServeHTTP(w, r) 37 | } else { 38 | http.Error(w, 39 | "Hound is not ready.", 40 | http.StatusServiceUnavailable) 41 | } 42 | } 43 | 44 | func (s *Server) serveWith(m *http.ServeMux) { 45 | s.lck.Lock() 46 | defer s.lck.Unlock() 47 | s.mux = m 48 | } 49 | 50 | // Start creates a new server that will immediately start handling HTTP traffic. 51 | // The HTTP server will return 200 on the health check, but a 503 on every other 52 | // request until ServeWithIndex is called to begin serving search traffic with 53 | // the given searchers. 54 | func Start(cfg *config.Config, addr string, dev bool) *Server { 55 | ch := make(chan error) 56 | 57 | s := &Server{ 58 | cfg: cfg, 59 | dev: dev, 60 | ch: ch, 61 | } 62 | 63 | go func() { 64 | ch <- http.ListenAndServe(addr, s) 65 | }() 66 | 67 | return s 68 | } 69 | 70 | // ServeWithIndex allow the server to start offering the search UI and the 71 | // search APIs operating on the given indexes. 
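//
// A rough sketch of the intended startup sequence (illustrative only; the real
// wiring lives in cmds/houndd/main.go, and buildSearchers and the address are
// hypothetical):
//
//	srv := web.Start(cfg, "0.0.0.0:6080", false) // health check is up, everything else returns 503
//	idx := buildSearchers(cfg)                   // map[string]*searcher.Searcher, one per repo
//	log.Fatal(srv.ServeWithIndex(idx))           // switches to full UI/API traffic and blocks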
72 | func (s *Server) ServeWithIndex(idx map[string]*searcher.Searcher) error { 73 | h, err := ui.Content(s.dev, s.cfg) 74 | if err != nil { 75 | return err 76 | } 77 | 78 | m := http.NewServeMux() 79 | m.Handle("/", h) 80 | api.Setup(m, idx, s.cfg) 81 | 82 | s.serveWith(m) 83 | 84 | return <-s.ch 85 | } 86 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | devServer: { 5 | port: 9000, 6 | host: '0.0.0.0', 7 | }, 8 | entry: { 9 | 'ui/js/hound.js': './ui/assets/js/hound.jsx', 10 | 'ui/js/excluded_files.js': './ui/assets/js/excluded_files.jsx', 11 | }, 12 | module: { 13 | rules: [ 14 | { 15 | test: /\.jsx?$/, 16 | exclude: /node_modules/, 17 | use: { 18 | loader: "babel-loader" 19 | } 20 | }, 21 | ] 22 | }, 23 | output: { 24 | filename: '[name]', 25 | path: path.resolve(__dirname, '.build') 26 | }, 27 | resolve: { 28 | extensions: ['.js', '.jsx'], 29 | } 30 | }; 31 | --------------------------------------------------------------------------------