├── .github
│   └── workflows
│       ├── ci.yml
│       └── release.yml
├── .gitignore
├── Build.PL
├── Changes
├── MANIFEST.SKIP
├── README.md
├── bin
│   ├── pgxn_api_server
│   └── pgxn_api_sync
├── lib
│   └── PGXN
│       ├── API.pm
│       └── API
│           ├── Indexer.pm
│           ├── Router.pm
│           ├── Sync.pm
│           └── index.html
└── t
    ├── base.t
    ├── bin
    │   ├── testrsync
    │   └── testrsync.bat
    ├── data
    │   ├── kv-tag-updated.json
    │   ├── ordered-tag-updated.json
    │   ├── pair-ext-updated.json
    │   ├── pair-ext-updated2.json
    │   ├── pair-ext-updated3.json
    │   ├── pair-ext-updated4.json
    │   ├── pair-tag-updated.json
    │   ├── pair-tag-updated2.json
    │   ├── pair-updated.json
    │   ├── pair-updated2.json
    │   ├── rsync.out
    │   ├── theory-updated.json
    │   └── theory-updated2.json
    ├── docs.t
    ├── htmlin
    │   ├── basic.html
    │   ├── bulkload.html
    │   ├── headers.html
    │   ├── omnipitr.html
    │   ├── shiftjis.html
    │   ├── unwanted.html
    │   └── utf8.html
    ├── htmlout
    │   ├── basic.html
    │   ├── bulkload.html
    │   ├── headers.html
    │   ├── omnipitr.html
    │   ├── shiftjis.html
    │   ├── unwanted.html
    │   └── utf8.html
    ├── indexer.t
    ├── pod-coverage.t
    ├── pod-spelling.t
    ├── pod.t
    ├── root
    │   ├── dist
    │   │   ├── pair.json
    │   │   ├── pair
    │   │   │   ├── 0.1.0
    │   │   │   │   ├── META.json
    │   │   │   │   └── pair-0.1.0.zip
    │   │   │   ├── 0.1.1
    │   │   │   │   ├── META.json
    │   │   │   │   ├── README.txt
    │   │   │   │   └── pair-0.1.1.zip
    │   │   │   └── 0.1.2
    │   │   │       ├── META.json
    │   │   │       └── pair-0.1.2.zip
    │   │   ├── pgTAP.json
    │   │   └── pgTAP
    │   │       └── 0.25.0
    │   │           └── META.json
    │   ├── extension
    │   │   ├── pair.json
    │   │   └── pgtap.json
    │   ├── index.json
    │   ├── meta
    │   │   ├── mirrors.json
    │   │   ├── spec.txt
    │   │   └── timestamp
    │   ├── tag
    │   │   ├── key value.json
    │   │   ├── ordered pair.json
    │   │   └── pair.json
    │   └── user
    │       ├── fred.json
    │       └── theory.json
    ├── router.t
    └── sync.t

/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
 1 | name: 🧪 CI
 2 | on:
 3 |   push:
 4 |     branches: ['*']
 5 | jobs:
 6 |   build:
 7 |     strategy:
 8 |       matrix:
 9 |         os: [[🐧, ubuntu], [🍎, macos]] # [🪟, windows]
10 |         perl: [ '5.38', '5.36', '5.34', '5.32', '5.30', '5.28', '5.26', '5.24', '5.22', '5.20', '5.18', '5.16', '5.14' ]
11 |     name: 🧅 Perl ${{ matrix.perl }} on ${{ matrix.os[0] }} ${{ matrix.os[1] }}
12 |     runs-on: ${{ matrix.os[1] }}-latest
13 |     steps:
14 |       - uses: actions/checkout@v4
15 |       - name: Setup Perl ${{ matrix.perl }}
16 |         uses: shogo82148/actions-setup-perl@v1
17 |         with: { perl-version: "${{ matrix.perl }}" }
18 | 
19 |       - name: Brew CommonMark
20 |         if: runner.os == 'macOS'
21 |         run: |
22 |           brew install cmark
23 |           cpanm -v --notest --no-man-pages CommonMark --configure-args="INC=-I'$(brew --prefix)/include' LIBS=-L'$(brew --prefix)/lib -lcmark'"
24 | 
25 |       - name: Apt CommonMark
26 |         if: runner.os == 'Linux'
27 |         run: |
28 |           sudo apt-get install libcmark-dev
29 |           cpanm -v --notest --no-man-pages CommonMark
30 | 
31 |       - name: Install Dependencies
32 |         run: cpanm -vn Module::Build && cpanm -vn --installdeps --with-recommends --with-develop .
33 |       - name: Run Tests
34 |         run: perl Build.PL && ./Build && ./Build test
35 | 


--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
 1 | name: 🚀 Release
 2 | on:
 3 |   push:
 4 |     tags: [v*]
 5 | jobs:
 6 |   release:
 7 |     name: Release on CPAN and GitHub
 8 |     runs-on: ubuntu-latest
 9 |     env:
10 |       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
11 |     steps:
12 |       - name: Check out the repo
13 |         uses: actions/checkout@v4
14 |       - name: Setup Perl
15 |         uses: shogo82148/actions-setup-perl@v1
16 |       - name: Install Release Dependencies
17 |         run: cpanm -qn Module::Build CPAN::Uploader
18 |       - name: Package the Release
19 |         id: package
20 |         run: perl Build.PL && ./Build manifest && ./Build dist && echo "tarball=$(./Build tarball_name )" >> $GITHUB_OUTPUT
21 |       - name: Generate Release Changes
22 |         run: ./Build latest_changes
23 |       - name: Release on CPAN
24 |         env:
25 |           CPANUSER: ${{ secrets.CPAN_USERNAME }}
26 |           CPANPASS: ${{ secrets.CPAN_PASSWORD }}
27 |         run: cpan-upload --user "$CPANUSER" --password "$CPANPASS" '${{ steps.package.outputs.tarball }}'
28 |       - name: Create GitHub Release
29 |         uses: softprops/action-gh-release@v2
30 |         with:
31 |           name: Release ${{ github.ref_name }}
32 |           body_path: latest_changes.md
33 |           files: ${{ steps.package.outputs.tarball }}
34 | 


--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
 1 | /_build
 2 | /blib
 3 | /PGXN-API-*
 4 | /MANIFEST
 5 | /MANIFEST.bak
 6 | /*META.*
 7 | /Build
 8 | /www
 9 | .vscode/
10 | 


--------------------------------------------------------------------------------
/Build.PL:
--------------------------------------------------------------------------------
 1 | use strict;
 2 | use warnings;
 3 | use Module::Build;
 4 | 
 5 | my $class = Module::Build->subclass(
 6 |     class => 'PGXN::Build',
 7 |     code => q{
 8 |         sub valid_licenses { { postgresql => 'PostgreSQL' } }
 9 |         sub ACTION_tarball_name { print shift->dist_dir . ".tar.gz\n" }
10 |         sub ACTION_latest_changes {
11 |             my $self = shift;
12 |             (my $dv = $self->dist_version) =~ s/^v//;
13 |             open my $in, '<:raw', 'Changes' or die "Cannot open Changes: $!\n";
14 |             open my $out, '>:raw', 'latest_changes.md' or die "Cannot open latest_changes.md: $!\n";
15 |             while (<$in>) { last if /^\Q$dv\E\b/ }
16 |             print {$out} "Changes for v$dv\n";
17 |             while (<$in>) {
18 |                 last if /^\s*$/;
19 |                 chomp;
20 |                 if (s/^\s+-/- /) {
21 |                     print {$out} "\n";
22 |                 } else {
23 |                     s/^\s+/ /;
24 |                 }
25 |                 print {$out} $_;
26 |             }
27 |             $self->add_to_cleanup('latest_changes.md');
28 |         }
29 |     },
30 | );
31 | 
32 | my $build = $class->new(
33 |     module_name => 'PGXN::API',
34 |     license => 'postgresql',
35 |     script_files => 'bin',
36 |     configure_requires => { 'Module::Build' => '0.4209' },
37 |     test_requires => {
38 |         'Test::Exception' => '0.31',
39 |         'Test::File' => '1.29',
40 |         'Test::File::Contents' => '0.20',
41 |         'Test::MockModule' => '0.05',
42 |         'Test::More' => '0.70',
43 |         'Test::Output' => '0.16',
44 |     },
45 |     requires => {
46 |         'Archive::Zip' => '1.30',
47 |         'Cwd' => '3.33',
48 |         'CommonMark' => '0.290000',
49 |         'Data::Dump' => '1.17',
50 |         'Digest::SHA1' => '2.13',
51 |         'Email::MIME::Creator' => '1.905',
52 |         'Email::Sender::Simple' => '0.102370',
53 |         'File::Path' => '2.08',
54 |         'File::Copy::Recursive' => '0.38',
55 |         'File::Spec' => '3.33',
56 |         'JSON' => '2.27',
57 |         'JSON::XS' => '2.3',
58 |         'List::Util' => '1.23',
59 |         'List::MoreUtils' => '0.30',
60 |         'Lucy' => '0.2.1',
61 |         'Moose' => '1.15',
62 |         'Moose::Util::TypeConstraints' => '1.15',
63 |         'MooseX::Singleton' => '0.25',
64 |         'namespace::autoclean' => '0.11',
65 |         'perl' => 5.014,
66 |         'PGXN::API::Searcher' => '0.11.1',
67 |         'Plack' => '0.9977',
68 |         'Plack::App::Directory' => 0,
69 |         'Plack::App::File' => 0,
70 |         'Plack::Middleware::JSONP' => 0,
71 |         'Plack::Builder' => 0,
72 |         'Text::Markup' => '0.33',
73 |         'URI::Template' => '0.16',
74 |         'XML::LibXML' => '1.70',
75 |     },
76 |     meta_merge => {
77 |         'meta-spec' => { version => 2 },
78 |         resources => {
79 |             homepage => 'http://api.pgxn.org/',
80 |             bugtracker => 'http://github.com/pgxn/pgxn-api/issues/',
81 |             repository => 'http://github.com/pgxn/pgxn-api/',
82 |         },
83 |         prereqs => {
84 |             develop => {
85 |                 requires => {
86 |                     'Test::Pod' => '1.41',
87 |                     'Test::Pod::Coverage' => '1.06',
88 |                     'Test::Spelling' => '0.25',
89 |                 },
90 |             },
91 |         },
92 |     },
93 | );
94 | 
95 | $build->add_build_element('html');
96 | $build->create_build_script;
97 | 


--------------------------------------------------------------------------------
/Changes:
--------------------------------------------------------------------------------
  1 | Revision history for Perl extension PGXN::API
  2 | 
  3 | 0.20.3
  4 | 
  5 | 0.20.2  2024-03-14T23:04:19Z
  6 |       - Fixed symlinks extracted from Zip files and permission errors when
  7 |         re-indexing distributions.
  8 | 
  9 | 0.20.1  2024-02-15T22:18:14Z
 10 |       - Fixed a bug where a testing extension's version and abstract was not
 11 |         properly indexed when there was no stable extension (and also unstable
 12 |         would not be indexed if there was no testing).
 13 |       - Fixed a bug where a user's JSON file was not updated for a testing
 14 |         release when there were no previous stable releases, or for an unstable
 15 |         release when there were no previous testing releases.
 16 |       - The indexer now recognizes plain text files ending in `.txt` or `.text`
 17 |         as documentation files to be indexed. They're parsed by the default
 18 |         Text::Markup::None parser, which wraps their contents in a `<pre>`
 19 |         block (#13).
 20 | 
 21 | 0.20.0  2024-02-09T17:36:10Z
 22 |       - Removed the `Capfile` and `eg` directory. Examples for managing PGXN
 23 |         can now be found in the pgxn/pgxn-ops GitHub repository.
 24 |       - Switched from Text::Markdown to CommonMark for parsing and formatting
 25 |         Markdown files (but not MultiMarkdown files). This allows code fences
 26 |         to work and generates nicer HTML in general, but is stricter about
 27 |         certain things.
 28 |       - The docs indexer now indexes a distribution's README if it is the only
 29 |         documentation it finds in the distribution (#12).
 30 |       - The docs indexer now strictly links an extension to the doc file
 31 |         specified via the `docfile` key in its `provides` object, even if
 32 |         it's a README (#10).
 33 |       - The indexer will now index a testing release if there are no stable
 34 |         releases, and will index an unstable release if there are neither
 35 |         stable nor testing releases (#2).
 36 |       - Updated the SemVer regex when parsing rsync output to the official
 37 |         version published in https://regex101.com/r/vkijKf/ (#16).
 38 |       - Fix unzipping of distributions to ensure that all directories are
 39 |         readable and executable but not writeable by all, and that files are
 40 |         only readable by all (#15).
 41 |       - Dropped support for Perl 5.10 and 5.12.
 42 | 
 43 | 0.16.5  2016-06-22T18:03:05Z
 44 |       - Fixed a test failure on systems with a non-English locale, thanks to
 45 |         Slaven Rezić (issue #25).
 46 |       - Fixed test failures on Windows.
 47 | 
 48 | 0.16.4  2016-06-15T16:10:33Z
 49 |       - Fixed a bug where v1.0.0 Semantic Versions were not matched in synced
 50 |         file names and directories.
 51 |       - Fixed a bug that tried to read an extension JSON file case-sensitively.
 52 |       - Fixed test failures due to a change in the generation of JSONP
 53 |         responses in Plack 1.0031.
 54 |       - Fixed a bug where the "pgxnbod" ID of a cleaned HTML element could be
 55 |         removed if the last element in the body was removed.
 56 | 
 57 | 0.16.3  2013-06-19T06:31:51Z
 58 |       - Updated URI::Template to v0.16 for its support for `{+var}` and
 59 |         dropped the nasty hack required to work around the lack of it in
 60 |         earlier versions.
 61 |       - Updated GitHub URLs to point to `/pgxn/` rather than `/theory/`.
 62 |       - Fixed a bug on the HTML documentation table of contents generator
 63 |         where it would choke on finding an `h1` after an `h3`.
 64 |       - Fixed test failures due to a change in the API of Email::Sender.
 65 |       - Removed the Pod tests from the distribution.
 66 |       - Eliminated smartmatch warnings on Perl 5.18.
 67 | 
 68 | 0.16.2  2012-02-14T06:46:47Z
 69 |       - Empty Documentation files are no longer indexed or displayed.
 70 |       - All files extracted from distribution zip files into the source
 71 |         directory are now globally readable. This behavior eliminates
 72 |         permissions errors when the user that serves the files is different
 73 |         than the one that unzipped them.
 74 |       - Files specified in the `docfile` key are now ignored if Text::Markup
 75 |         does not recognize them, since they cannot then be parsed for serving
 76 |         as HTML.
 77 |       - Worked around an [Archive::Zip
 78 |         bug](https://rt.cpan.org/Ticket/Display.html?id=74255) when reading
 79 |         the README file for indexing.
 80 | 
 81 | 0.16.1  2012-01-10T04:30:02Z
 82 |       - Added `version_string` class method to PGXN::API. It should always be
 83 |         used for displaying the version.
 84 |       - Added `Capfile` for capistrano deployment.
 85 |       - Added `eg/debian_init`, an example init script for Debian that runs
 86 |         PGXN::API on Starman.
 87 |       - Added `eg/pgxn_sync`, an example shell script that syncs from a local
 88 |         master mirror.
 89 | 
 90 | 0.16.0  2011-08-26T17:29:00Z
 91 |       - Eliminated "Use of uninitialized value in subroutine entry" warning
 92 |         when indexing a user with no associated URI.
 93 |       - If a version of an extension is deleted from the mirror extension
 94 |         JSON file, it will now also be deleted from the API doc root's
 95 |         extension JSON file (Issue #8).
 96 |       - Requiring Text::Markup 0.13, as it has an important fix for parsing
 97 |         Pod.
 98 |       - Upgraded from KinoSearch to Lucy.
 99 | 
100 | 0.15.0  2011-05-12T18:40:43
101 |       - Changed the JSONP callback parameter from "jsonp" to "callback".
102 | 
103 | 0.14.1  2011-05-12T18:11:02
104 |       - Changed mentions of `pgxn_apid` to the correct `pgxn_api_server` in
105 |         `pgxn_api_server` docs.
106 |       - Backported to support Perl 5.10.
107 | 
108 | 0.14.0  2011-05-03T00:09:13
109 |       - Now require Plack 0.9977 to enable JSONP for all JSON file requests.
110 |       - Fixed test failures on systems where `rsync` is not in the path.
111 |       - Replaced `pgxn_api.psgi` with `pgxn_api_server`. The latter uses
112 |         proper command-line processing and includes all of the functionality
113 |         of `plackup`.
114 | 
115 | 0.13.0  2011-04-30T20:40:21
116 |       - The "name" and "user_name" fields are now full-text indexed in the
117 |         "docs", "dists", and "extensions" indexes. This will allow a search
118 |         such as `user:theory` to return results.
119 | 
120 | 0.12.9  2011-04-30T20:01:29
121 |       - Fixed a test failure in `t/indexer.t` on case-sensitive file systems.
122 |       - Removed use of `env perl` in the `t/bin/testrsync` script. Just use
123 |         `/usr/bin/perl -w` instead and let it be any Perl, not just 5.12.
124 |         Should prevent failures on systems that don't allow arguments to `env`
125 |         and where the system Perl is different than the installing Perl.
126 |       - Added Data::Dump to list of requirements. Was inadvertently omitted.
127 | 
128 | 0.12.8  2011-04-29T21:21:13
129 |       - Some documentation tweaks.
130 |       - First release to CPAN.
131 | 
132 | 0.12.7  2011-04-28T22:28:31
133 |       - Fixed a bug where a root `index.json` updated on the mirror
134 |         was not likewise getting updated in the API root.
135 |       - Moved the default `index.html` file so that it will be installed and
136 |         findable when PGXN::API is installed from CPAN.
137 |       - Added a link to the API docs to the default `index.html` file.
138 |       - Renamed the `{char}` variable in the `userlist` URI template to
139 |         `{letter}`, since it better describes the value (an ASCII letter a-z).
140 |       - Search parameters must be submitted via a GET request.
141 | 
142 | 0.12.6  2011-04-27T18:52:18
143 |       - Eliminated "Use of uninitialized variable in subroutine" warning when
144 |         indexing extensions with no abstract.
145 |       - Allow an optional trailing slash on search URLs (since the
146 |         template includes one).
147 |       - Add new "special files" to detect: INSTALL, LICENSE, COPYING, and
148 |         AUTHORS.
149 |       - Changed search results content type from `text/json` to
150 |         `application/json`.
151 |       - Enabled JSONP for search requests.
152 |       - Changed tests to use `.zip` ending for downloads instead of `.pgz`.
153 | 
154 | 0.12.5  2011-04-25T18:07:27
155 |       - The directory name in which browseable distribution source files are
156 |         unzipped is now always lowercase.
157 |       - Fixed the primary key for distributions, docs, and extensions in
158 |         the search index to always be lowercased.
159 |       - Distribution names and versions are now always lowercase in verbose
160 |         output.
161 | 
162 | 0.12.4  2011-04-25T16:59:36
163 |       - The prefix in zip files is now assumed to be lowercase, as
164 |         PGXN::Manager v0.12.4 enforces.
165 | 
166 | 0.12.3  2011-04-25T16:35:28
167 |       - When searching for "special files", the indexer now looks for files
168 |         ending in ".in" if it doesn't find them without the ".in". Suggested
169 |         by Daniele Varrazzo.
170 |       - Always use lowercase distribution names, versions, extension names,
171 |         tags, and nicknames in URI templates. They're still displayed with
172 |         case preserved inside files and the index.
173 | 
174 | 0.12.2  2011-04-22T18:51:42
175 |       - Changed the media type of many more files served from `src` to
176 |         text/plain. This is both for security reasons and so that files that
177 |         are plain text open in the browser rather than prompt to download.
178 | 
179 | 0.12.1  2011-04-20T23:41:26
180 |       - Fixed rsync log parsing to properly detect when a file has been
181 |         changed. Previously, the code was only noticing new files.
182 | 
183 | 0.12.0  2011-04-20T05:35:52
184 |       - Requests should now all be returned with an appropriate
185 |         `Content-Length` header.
186 |       - Added the `userlist` URI template and the code to generate the user
187 |         lists to Indexer. It's set off at the end of a sync.
188 |       - Now update user JSON files whenever they're updated on the mirror.
189 |         Previously, a user JSON file was only updated when the user released a
190 |         distribution. But now user JSON files can be updated independently, and
191 |         for users with no distributions at all. So we notice updated user JSON
192 |         files and properly merge them into the doc root and update them in the
193 |         full text index.
194 |       - Renamed the `doc` URI template to `htmldoc`, in case we ever decide to
195 |         offer other doc formats via the API.
196 |       - Renamed the path to a doc from `doc` to `docpath` in the extension
197 |         JSON and the `provides` hashes. This better distinguishes it from the
198 |         `docfile` key in the `provides` section of the meta spec. The
199 |         `htmldoc` URI template gets its variable renamed to `docpath`, as
200 |         well.
201 | 
202 | 0.11.0  2011-04-13T02:25:37
203 |       - Fixed the search API to throw a 400 for invalid queries: Missing
204 |         query, or a query with a value of "*" or "?".
205 |       - Changed the search URI template from "/search" to "/search/{in}/", as
206 |         the thing to be searched is required in the URI.
207 |       - Made the regular expression matching stats URIs stricter. It now
208 |         matches `(?:dist|tag|user|extension|summary)` rather than `[^/]+?`.
209 |       - Added a `.html` version of the spec to the API doc root, parsed from
210 |         the MultiMarkdown `.txt` file copied from the mirror.
211 | 
212 | 0.10.0  2011-04-11T23:41:16
213 |       - Removed PGXN::API::Stats. The same data is now generated by
214 |         PGXN::Manager 0.11.0.
215 |       - Updated sync to properly copy the new stats files from the mirror root,
216 |         as well as the new spec file.
217 |       - Updated sync to update the root `index.json` only if it has changed.
218 |         It will of course also be created on the first sync to a new doc root.
219 |       - Changed sync to load templates from the mirror root instead of relying
220 |         on those loaded from the doc root by PGXN::API. This is because the
221 |         latter won't exist before we start parsing the `rsync` log file, and
222 |         may need updating even if it does exist.
223 | 
224 | 0.9.0   2011-04-06T04:18:53
225 |       - Requests for a dist, extension, user, or tag URI ending in "/" instead
226 |         of ".json" (as in "/tag/pair/" instead of "/tag/pair.json") now
227 |         return the JSON file.
228 |       - Added PGXN::API::Stats. It's used during sync to generate stats for
229 |         all the objects managed by the API (distributions, users, extensions,
230 |         and tags).
231 | 
232 | 0.8.0   2011-04-02T20:32:02
233 |       - Documentation and README documents now have their white space
234 |         normalized to single spaces before indexing. This allows highlighting
235 |         to work better in result sets. Suggested by Marvin Humphrey.
236 |       - The PGXN::API::Searcher object used by the router is now maintained
237 |         across requests, rather than created anew for every call to the
238 |         /search API. This is because PGXN::API::Searcher v0.7.1 properly
239 |         reconnects in the event of a changed index, so we don't have to worry
240 |         about it here.
241 |       - Fixed the `uri` and `email` fields in the user search index so that
242 |         they are both stored *and* indexed. They were previously just indexed,
243 |         and so might be missing from search results.
244 | 
245 | 0.7.0   2011-03-31T20:55:51
246 |       - Added "search" URI template to the API server's `/index.json`.
247 |       - Added full documentation.
248 |       - The indexer now respects the `no_index` key in the meta file.
249 |         Specifically, it will ignore any documentation files that match the
250 |         no_index spec. They won't be indexed.
251 |       - Changed the default `mirror_root` subdirectory from "pgxn" to
252 |         "mirror".
253 |       - Added query parameter validation to the `/search` API. Any invalid
254 |         parameters now return 400.
255 |       - Restored required index specification to the search API path, rather
256 |         than a query parameter. So `/search` is now a 404. Use `/search/docs`
257 |         instead. Can also use `/search/dists`, `/search/extensions`,
258 |         `/search/users`, and `/search/tags`.
259 |       - To be in sync with the above, the search indexes now have plural names
260 |         rather than singular.
261 |       - Fixed "Cannot copy directory" errors in tests when run from a
262 |         distribution.
263 | 
264 | 0.6.9   2011-03-29T04:43:53
265 |       - Fixed option processing in `pgxn_api.psgi` so that it deals only with
266 |         the options we care about. Otherwise it fails to start when running
267 |         under Starman.
268 | 
269 | 0.6.8   2011-03-29T04:18:30
270 |       - Added X-PGXN-API-Version header to all HTTP responses.
271 |       - Added required `errors_to` and `errors_from` parameters to the router.
272 |       - Added required `errors_to` and `errors_from` options to
273 |         `pgxn_api.psgi`, as well as the `doc_root` option.
274 |       - Added the `--root` option to `pgxn_api_sync`.
275 | 
276 | 0.6.7   2011-03-29T02:18:01
277 |       - Moved the doc path in the extension JSON file under the release status
278 |         key.
279 | 
280 | 0.6.6   2011-03-29T01:13:48
281 |       - Changed `{+path}` in the doc URI template to `{+doc}`, since that
282 |         will be more meaningful in other contexts.
283 |       - Added the `doc` key to the `provides` hashes in the distribution
284 |         metadata. It contains the doc path (as used by the doc URI template)
285 |         to the doc with the same base name as the provided extension.
286 |       - Added the `doc` key to the extension search index, so it's easy to
287 |         link to the documentation page from extension search results.
288 | 
289 | 0.6.5   2011-03-28T03:57:52
290 |       - The search API no longer fails when only the "q" query parameter is
291 |         passed.
292 |       - Added a 500 error handler that sends an email on error.
293 | 
294 | 0.6.4   2011-03-28T03:07:13
295 |       - Updated for PGXN::API::Searcher v0.6.1. 
296 |       - Now only one search URL, `/search`. The index is specified via the
297 |         `in` query parameter.
298 | 
299 | 0.6.3   2011-03-28T02:08:01
300 |       - Updated for new URI template names in PGXN::Manager v0.10.4.
301 |       - Updated tests for new default metadata URI template paths.
302 | 
303 | 0.6.2   2011-03-26T05:29:36
304 |       - Search requests go to /search instead of /by. The latter is used for
305 |         metadata files.
306 |       - Fixed another "Use of uninitialized value in subroutine entry" warning
307 |         from the indexer.
308 | 
309 | 0.6.1   2011-03-26T04:12:42
310 |       - Fixed error in indexer for distributions with no tags.
311 |       - Fixed "Use of uninitialized value in subroutine entry" warnings from
312 |         the full text indexer in PGXN::API::Indexer.
313 |       - Fixed user name in distribution full text index so that it's not the
314 |         same name for all users.
315 | 
316 | 0.6.0   2011-03-26T03:14:12
317 |       - The /src directory is now served with Plack::App::Directory, so that a
318 |         listing of the files in a directory will be presented. Elsewhere,
319 |         directory requests return a 404.
320 |       - Added full-text search indexing. Requests go to /by.
321 | 
322 | 0.5.5   2011-03-17T23:07:43
323 |       - Changed doc JSON to have a hash for each value: a title and an
324 |         abstract (if there is one).
325 | 
326 | 0.5.4   2011-03-17T00:08:30
327 |       - Updated to work with new default URI templates deployed in
328 |         PGXN::Manager 0.10.2.
329 | 
330 | 0.5.3  2011-03-16T03:16:27
331 |       - Changed doc template to use `{+path}` instead of `{doc}`, so that
332 |         slashes will not be encoded. This isn't strictly-speaking supported by
333 |         URI::Template yet, so for now we set the variable as `+path` and
334 |         prevent URI::Escape from escaping slashes.
335 | 
336 | 0.5.2   2011-03-15T22:23:56
337 |       - HTML documents are now properly written out as UTF-8, instead of as
338 |         raw bytes.
339 |       - The doc parser no longer autovivifies doc file names into the
340 |         "provides" hash of the version metadata file.
341 | 
342 | 0.5.1   2011-03-15T21:44:07
343 |       - The sync code now updates the mirror meta after `rsync`ing but before
344 |         doing anything else. It was fetching a template first, and this caused
345 |         errors on the first sync because the `index.json` had not yet been
346 |         copied to the document root.
347 | 
348 | 0.5.0   2011-03-15T21:24:59
349 |       - The URI templates are now read from the doc root rather than the
350 |         mirror root, so that the API-specific templates stored there will be
351 |         visible to the app.
352 |       - Added documentation parsing. Docs are read by Text::Markup and
353 |         converted to HTML. The HTML is cleaned up, unwanted stuff removed, and
354 |         a table of contents added. They are then written to a file represented
355 |         by the new "doc" URI template. They are also listed in the versioned
356 |         distribution metadata, suitable for formatting into a URI via the
357 |         template. README files are included in this formatting treatment.
358 | 
359 | 0.4.1   2011-03-11T23:42:38
360 |       - Fixed a bug introduced in 0.4.0 where the /by/user and /by/tag files
361 |         were overwriting release data from other distributions, thereby
362 |         removing their abstracts. We now update the release data only for the
363 |         currently-processing distribution.
364 | 
365 | 0.4.0   2011-03-11T23:23:12
366 |       - Added the "special_files" key to the distribution metadata file. It
367 |         includes paths to `README`, `META.json`, `Changes`, `Makefile`, and
368 |         other special files in the distribution.
369 |       - Moved unzipped source directories under a subdirectory named for the
370 |         distribution. That is, rather than `/src/pair-0.1.1/`, we now have
371 |         `/src/pair/pair-0.1.1/`.
372 |       - Make sure that the `/by/dist` file has the latest *stable* version
373 |         listed as the version. Only make it a testing version if there is no
374 |         stable version, and an unstable version only if there is no stable or
375 |         testing version.
376 |       - Fixed bugs in release data merging in the `/by/user` and `/by/tag`
377 |         files. The old code had relied too much on what was in the
378 |         distribution. Since the only additional data point over the mirrored
379 |         original is an abstract, we can just rely on the mirror data to be
380 |         correct and add the abstract. This simplifies the code quite a bit, in
381 |         fact.
382 | 
383 | 0.3.2   2011-03-10T21:53:17
384 |       - Updated to work with PGXN::Manager 0.10.0 output. In particular,
385 |         "owners" are now known as "users" and "release_date" is now known as
386 |         "date". The relevant methods and data have been changed to reflect
387 |         this change.
388 | 
389 | 0.3.1 2011-03-10T00:28:36
390 |       - Updated to work with PGXN::Manager 0.9.1 output.
391 | 
392 | 0.3.0 2011-03-09T22:18:16
393 |       - Updated to work with PGXN::Manager 0.9.0 output.
394 | 
395 | 0.2.0   2011-03-03T01:09:42
396 |       - No longer serve directory indexes.
397 |       - Added tests for the router.
398 |       - Serve distribution files with content-type application/zip.
399 |       - Added `var/index.html`, placeholder for a root-level HTML file.
400 | 
401 | 0.1.0   2011-03-02 23:58:41
402 |       - Initial version.
403 |       - Syncs to mirror.
404 |       - Merges metadata to API doc root.
405 |       - Has PSGI to serve static files from the doc root.
406 | 


--------------------------------------------------------------------------------
/MANIFEST.SKIP:
--------------------------------------------------------------------------------
 1 | # Avoid version control files.
 2 | \bRCS\b
 3 | \bCVS\b
 4 | ,v$
 5 | \B\.svn\b
 6 | \B\.git
 7 | \.vscode
 8 | 
 9 | # Avoid Makemaker generated and utility files.
10 | \bMakefile$
11 | \bblib
12 | \bMakeMaker-\d
13 | \bpm_to_blib$
14 | \bblibdirs$
15 | ^MANIFEST\.SKIP$
16 | ^MYMETA
17 | 
18 | # Avoid Module::Build generated and utility files.
19 | \bBuild$
20 | \b_build
21 | 
22 | # Avoid temp and backup files.
23 | ~$
24 | \.tmp$
25 | \.old$
26 | \.bak$
27 | \#$
28 | \b\.#
29 | 
30 | # Avoid build files.
31 | ^PGXN-API
32 | ^Capfile
33 | ^www
34 | 
35 | # Avoid Pod tests.
36 | t/pod.+
37 | 


--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | PGXN/API
 2 | ========
 3 | 
 4 | This application provides a REST API for flexible searching of PGXN distribution
 5 | metadata and documentation. See [the docs](https://github.com/pgxn/pgxn-api/wiki)
 6 | for details on using the API.
 7 | 
 8 | Installation
 9 | ------------
10 | 
11 | To install this module, type the following:
12 | 
13 |     perl Build.PL
14 |     ./Build
15 |     ./Build test
16 |     ./Build install
17 | 
18 | Dependencies
19 | ------------
20 | 
21 | PGXN-API requires Perl 5.14 and the following modules:
22 | 
23 | *   Archive::Zip
24 | *   Cwd
25 | *   CommonMark
26 | *   Data::Dump
27 | *   Digest::SHA1
28 | *   Email::MIME::Creator
29 | *   Email::Sender::Simple
30 | *   File::Path
31 | *   File::Copy::Recursive
32 | *   File::Spec
33 | *   JSON
34 | *   JSON::XS
35 | *   List::Util
36 | *   List::MoreUtils
37 | *   Lucy
38 | *   Moose
39 | *   Moose::Util::TypeConstraints
40 | *   MooseX::Singleton
41 | *   namespace::autoclean
42 | *   PGXN::API::Searcher
43 | *   Plack
44 | *   Plack::App::Directory
45 | *   Plack::App::File
46 | *   Plack::Middleware::JSONP
47 | *   Plack::Builder
48 | *   Text::Markup
49 | *   URI::Template
50 | *   XML::LibXML
51 | 
52 | Copyright and License
53 | ---------------------
54 | 
55 | Copyright (c) 2011-2024 David E. Wheeler.
56 | 
57 | This module is free software; you can redistribute it and/or modify it under
58 | the [PostgreSQL License](http://www.opensource.org/licenses/postgresql).
59 | 
60 | Permission to use, copy, modify, and distribute this software and its
61 | documentation for any purpose, without fee, and without a written agreement is
62 | hereby granted, provided that the above copyright notice and this paragraph
63 | and the following two paragraphs appear in all copies.
64 | 
65 | In no event shall David E. Wheeler be liable to any party for direct,
66 | indirect, special, incidental, or consequential damages, including lost
67 | profits, arising out of the use of this software and its documentation, even
68 | if David E. Wheeler has been advised of the possibility of such damage.
69 | 
70 | David E. Wheeler specifically disclaims any warranties, including, but not
71 | limited to, the implied warranties of merchantability and fitness for a
72 | particular purpose. The software provided hereunder is on an "as is" basis,
73 | and David E. Wheeler has no obligations to provide maintenance, support,
74 | updates, enhancements, or modifications.
75 | 


--------------------------------------------------------------------------------
/bin/pgxn_api_server:
--------------------------------------------------------------------------------
  1 | #!/usr/local/bin/perl -w
  2 | 
  3 | use v5.14;
  4 | use utf8;
  5 | use PGXN::API::Router;
  6 | use Plack::Runner;
  7 | use Getopt::Long;
  8 | 
  9 | Getopt::Long::Configure( qw(bundling pass_through) );
 10 | 
 11 | my %opts;
 12 | 
 13 | GetOptions(
 14 |     'errors-to=s'   => \$opts{errors_to},
 15 |     'errors-from=s' => \$opts{errors_from},
 16 |     'doc-root=s'    => \$opts{doc_root},
 17 |     'help|h'        => \my $help,
 18 |     'man|m'         => \my $man,
 19 |     'version|v'     => \my $version,
 20 | ) or _pod2usage();
 21 | 
 22 | _pod2usage(
 23 |     ( $man ? ( '-sections' => '.+' ) : ()),
 24 |     '-exitval' => 0,
 25 | ) if $help or $man;
 26 | 
 27 | if ($version) {
 28 |     require File::Basename;
 29 |     say File::Basename::basename($0), ' ', PGXN::API->version_string;
 30 |     exit;
 31 | }
 32 | 
 33 | # Check required options.
 34 | if (my @missing = map {
 35 |     ( my $opt = $_ ) =~ s/_/-/g;
 36 |     "--$opt";
 37 | } grep { !$opts{$_} } qw(errors_to errors_from)) {
 38 |     my $pl = @missing > 1 ? 's' : '';
 39 |     my $last = pop @missing;
 40 |     my $disp = @missing ? join(', ', @missing) . (@missing > 1 ? ',' : '')
 41 |         . " and $last" : $last;
 42 |     _pod2usage( '-message' => "Missing required $disp option$pl" );
 43 | }
 44 | 
 45 | my $runner = Plack::Runner->new;
 46 | $runner->parse_options(@ARGV);
 47 | $runner->run(PGXN::API::Router->app(%opts));
 48 | 
 49 | sub _pod2usage {
 50 |     require Pod::Usage;
 51 |     Pod::Usage::pod2usage(
 52 |         '-verbose'  => 99,
 53 |         '-sections' => '(?i:(Usage|Options))',
 54 |         '-exitval'  => 1,
 55 |         @_
 56 |     );
 57 | }
 58 | 
 59 | =head1 Name
 60 | 
 61 | pgxn_api_server - The PGXN API server
 62 | 
 63 | =head1 Usage
 64 | 
 65 |   pgxn_api_server --doc-root    /var/www/api \
 66 |                   --errors-from oops@example.com \
 67 |                   --errors-to   alerts@example.com
 68 | 
 69 | =head1 Description
 70 | 
 71 | This is the PGXN API PSGI server. It automatically figures out the environment
 72 | it is run in, and runs your application in that environment. C,
 73 | C, C and others can all be detected. See L for
 74 | the authoritative list.
 75 | 
 76 | =head1 Options
 77 | 
 78 |      --errors-to   EMAIL   Email to which error messages should be sent.
 79 |      --errors-from EMAIL   Email to use on the From line of error messages.
 80 |      --doc-root    PATH    Location of the API document root.
 81 |   -o --host                Bind TCP daemon to the specified interface.
 82 |   -p --port                Listen on the specified TCP port.
 83 |   -s --server              Plack server to use.
 84 |   -S --socket              UNIX domain socket path to listen on.
 85 |   -l --listen              Address to listen on.
 86 |   -D --daemonize           Make the process go to the background.
 87 |   -I               PATH    Specify a Perl include library path.
 88 |   -E --env         ENV     Run with the specified environment.
 89 |   -r --reload              Reload when a development file changes.
 90 |   -R --Reload      PATH    Reload when a file in the specified path changes.
 91 |   -L --loader      LOADER  Run with specified loading class.
 92 |      --access-log  PATH    Write access log to the specified file.
 93 |   -h --help                Print a usage statement and exit.
 94 |   -m --man                 Print the complete documentation and exit.
 95 |   -v --version             Print the version number and exit.
 96 | 
 97 | =head1 Option Details
 98 | 
 99 | =over
100 | 
101 | =item C<--errors-to>
102 | 
103 | Specify an email address to which to send error information. Whenever the
104 | server encounters a 500 error, it will send detailed information, including a
105 | stack trace, to this address.
106 | 
107 | =item C<--errors-from>
108 | 
109 | The email address from which error messages should appear to be sent.
110 | 
111 | =item C<--doc-root>
112 | 
113 | The path to use for the API document root. Should be the same path as that
114 | managed by L.
115 | 
116 | =item C<-o>, C<--host>
117 | 
118 | The interface a TCP based server daemon binds to. Defaults to C, which
119 | lets most server back ends bind to the any (C<*>) interface. This option does
120 | nothing if the server does not support TCP sockets.
121 | 
122 | =item C<-p>, C<--port>
123 | 
124 | The port number a TCP based server daemon listens on. Defaults to 5000. This
125 | option does nothing if the server does not support TCP sockets.
126 | 
127 | =item C<-s>, C<--server>
128 | 
129 | Select a specific implementation to run on using the C
130 | environment variable or use the C<-s> or C<--server> flag which will be
131 | preferred over the environment variable.
132 | 
133 | =item C<-S>, C<--socket>
134 | 
135 | UNIX domain socket path to listen on. Defaults to C. This option does
136 | nothing if the server doesn't support UNIX sockets.
137 | 
138 | =item C<-l>, C<--listen>
139 | 
140 | Addresses on which to listen. It could be C, C<:PORT> or C
141 | (without colons). It could be multiple but it depends on the server
142 | implementations whether multiple interfaces are supported.
143 | 
144 | =item C<-D>, C<--daemonize>
145 | 
146 | Makes the process go to the background. It's up to the back end server/handler
147 | implementation whether this option is respected or not.
148 | 
149 | =item C<-I>
150 | 
151 | Specify perl library include path, like C's -I option.
152 | 
153 | =item C<-E>, C<--env>
154 | 
155 | Specify the environment option (default is C). You can set this
156 | value by setting C environment variable as well, and specifying the
157 | value with the command line options writes back to C as well, so
158 | the API server can tell which environment setting the application is running
159 | on.
160 | 
161 |   # These two are the same
162 |   pgxn_api_server -E deployment
163 |   env PLACK_ENV=deployment pgxn_api_server
164 | 
165 | The value can be anything but commonly used ones are C,
166 | C and C.
167 | 
168 | If it's set to C, following middleware components are enabled by
169 | default: C, C and C.
170 | 
171 | =item C<-r>, C<--reload>
172 | 
173 | Watch for updates in your development directory and restart the server
174 | whenever a file is updated. This option by default watches the C
175 | directory. Use C<-R> if you want to watch other directories.
176 | 
177 | =item C<-R>, C<--Reload>
178 | 
179 | The C<-R> option allows you to specify the paths to watch for file updates,
180 | separated by commas (C<,>).
181 | 
182 |   pgxn_api_server -R /path/to/project/lib,/path/to/project/templates
183 | 
184 | =item C<-L>, C<--loader>
185 | 
186 | Specify the server loading subclass that implements how to run the server.
187 | Available options are I (default), C (automatically
188 | set when C<-r> or C<-R> is used), I and I.
189 | 
190 | See L and L when to
191 | use those loader types.
192 | 
193 | =item C<--access-log>
194 | 
195 | Specify the path to a file where the access log should be written. By default,
196 | in the development environment access logs will go to C.
197 | 
198 | =item C<-h> C<--help>
199 | 
200 | Outputs usage information and exits.
201 | 
202 | =item C<-m> C<--man>
203 | 
204 | Outputs this full documentation and exits.
205 | 
206 | =item C<-v> C<--version>
207 | 
208 | Outputs the version and exits.
209 | 
210 | =back
211 | 
212 | Other options that start with C<--> are passed through to the back end
213 | server. See the appropriate L back end documentation to see
214 | which options are available.
215 | 
216 | =head1 See Also
217 | 
218 | =over
219 | 
220 | =item * L
221 | 
222 | =item * L
223 | 
224 | =item * L
225 | 
226 | =item * L
227 | 
228 | =back
229 | 
230 | =head1 Author
231 | 
232 | David E. Wheeler 
233 | 
234 | =head1 Copyright and License
235 | 
236 | Copyright (c) 2011-2024 David E. Wheeler.
237 | 
238 | This module is free software; you can redistribute it and/or modify it under
239 | the L.
240 | 
241 | Permission to use, copy, modify, and distribute this software and its
242 | documentation for any purpose, without fee, and without a written agreement is
243 | hereby granted, provided that the above copyright notice and this paragraph
244 | and the following two paragraphs appear in all copies.
245 | 
246 | In no event shall David E. Wheeler be liable to any party for direct,
247 | indirect, special, incidental, or consequential damages, including lost
248 | profits, arising out of the use of this software and its documentation, even
249 | if David E. Wheeler has been advised of the possibility of such damage.
250 | 
251 | David E. Wheeler specifically disclaims any warranties, including, but not
252 | limited to, the implied warranties of merchantability and fitness for a
253 | particular purpose. The software provided hereunder is on an "as is" basis,
254 | and David E. Wheeler has no obligations to provide maintenance, support,
255 | updates, enhancements, or modifications.
256 | 
257 | =cut
258 | 


--------------------------------------------------------------------------------
/bin/pgxn_api_sync:
--------------------------------------------------------------------------------
  1 | #!/usr/local/bin/perl -w
  2 | 
  3 | use v5.14;
  4 | use utf8;
  5 | use PGXN::API::Sync;
  6 | use Getopt::Long;
  7 | use Carp;
  8 | Getopt::Long::Configure( qw(bundling) );
  9 | 
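# Make warnings and fatal errors report full stack traces via Carp below.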
 10 | BEGIN {
 11 |     $SIG{__WARN__} = \&Carp::cluck;
 12 |     $SIG{__DIE__}  = \&Carp::confess;
 13 | }
 14 | 
 15 | GetOptions(
 16 |     'rsync|r=s'  => \my $rsync,
 17 |     'root|d=s'   => \my $root,
 18 |     'verbose|V+' => \my $verbose,
 19 |     'help|h'     => \my $help,
 20 |     'man|m'      => \my $man,
 21 |     'version|v'  => \my $version,
 22 | ) or _pod2usage();
 23 | 
 24 | _pod2usage(
 25 |     ( $man ? ( '-sections' => '.+' ) : ()),
 26 |     '-exitval' => 0,
 27 | ) if $help or $man;
 28 | 
 29 | if ($version) {
 30 |     require File::Basename;
 31 |     say File::Basename::basename($0), ' ', PGXN::API->version_string;
 32 |     exit;
 33 | }
 34 | 
 35 | my $source = shift or _pod2usage();
 36 | 
 37 | PGXN::API->instance->doc_root($root) if $root;
 38 | 
 39 | PGXN::API::Sync->new(
 40 |     source => $source,
 41 |     ($rsync ? (rsync_path => $rsync) : ()),
 42 |     verbose => $verbose || 0,
 43 | )->run;
 44 | 
 45 | sub _pod2usage {
 46 |     require Pod::Usage;
 47 |     Pod::Usage::pod2usage(
 48 |         '-verbose'  => 99,
 49 |         '-sections' => '(?i:(Usage|Options))',
 50 |         '-exitval'  => 1,
 51 |         @_
 52 |     );
 53 | }
 54 | 
 55 | =head1 Name
 56 | 
 57 | pgxn_api_sync - Sync the PGXN API server to a PGXN mirror
 58 | 
 59 | =head1 Usage
 60 | 
 61 |   pgxn_api_sync --root /path/to/api/doc/root \
 62 |                 --rsync /path/to/rsync \
 63 |                 rsync://master.pgxn.org/pgxn/
 64 | 
 65 | =head1 Description
 66 | 
 67 | This program syncs the PGXN API server to a PGXN mirror, unpacking all of the
 68 | synced distributions and indexing them for the API. It's meant to be run on a
 69 | cron job, so that the API server can be regularly updated with the most recent
 70 | additions to the network. For example, to run it hourly at 42 minutes past the
 71 | hour, put this in your crontab:
 72 | 
 73 |   42 * * * * pgxn_api_sync rsync://master.pgxn.org/pgxn/
 74 | 
 75 | =head1 Options
 76 | 
 77 |   -r --rsync          Location of rsync if it's not in your path.
 78 |   -d --root           Directory to use for the API doc root.
 79 |   -V --verbose        Incremental verbosity to STDOUT.
 80 |   -h --help           Print a usage statement and exit.
 81 |   -m --man            Print the complete documentation and exit.
 82 |   -v --version        Print the version number and exit.
 83 | 
 84 | =head1 Author
 85 | 
 86 | David E. Wheeler 
 87 | 
 88 | =head1 Copyright and License
 89 | 
 90 | Copyright (c) 2011-2024 David E. Wheeler.
 91 | 
 92 | This module is free software; you can redistribute it and/or modify it under
 93 | the L.
 94 | 
 95 | Permission to use, copy, modify, and distribute this software and its
 96 | documentation for any purpose, without fee, and without a written agreement is
 97 | hereby granted, provided that the above copyright notice and this paragraph
 98 | and the following two paragraphs appear in all copies.
 99 | 
100 | In no event shall David E. Wheeler be liable to any party for direct,
101 | indirect, special, incidental, or consequential damages, including lost
102 | profits, arising out of the use of this software and its documentation, even
103 | if David E. Wheeler has been advised of the possibility of such damage.
104 | 
105 | David E. Wheeler specifically disclaims any warranties, including, but not
106 | limited to, the implied warranties of merchantability and fitness for a
107 | particular purpose. The software provided hereunder is on an "as is" basis,
108 | and David E. Wheeler has no obligations to provide maintenance, support,
109 | updates, enhancements, or modifications.
110 | 
111 | =cut
112 | 


--------------------------------------------------------------------------------
/lib/PGXN/API.pm:
--------------------------------------------------------------------------------
  1 | package PGXN::API;
  2 | 
  3 | use v5.14;
  4 | use utf8;
  5 | use MooseX::Singleton;
  6 | use File::Spec::Functions qw(catfile catdir);
  7 | use URI::Template;
  8 | use JSON;
  9 | use namespace::autoclean;
 10 | our $VERSION = v0.20.3;
 11 | 
 12 | =head1 Name
 13 | 
 14 | PGXN::API - Maintain and serve a REST API to search PGXN mirrors
 15 | 
 16 | =head1 Synopsis
 17 | 
 18 | In a cron job:
 19 | 
 20 |   42 * * * * pgxn_api_sync --root /var/www/api rsync://master.pgxn.org/pgxn/
 21 | 
 22 | In a system start script:
 23 | 
 24 |   pgxn_api_server --doc-root    /var/www/api \
 25 |                   --errors-from oops@example.com \
 26 |                   --errors-to   alerts@example.com
 27 | 
 28 | =head1 Description
 29 | 
 30 | L is a L-inspired network for
 31 | distributing extensions for the L.
 32 | All of the infrastructure tools, however, have been designed to be used to
 33 | create networks for distributing any kind of release distributions and for
 34 | providing a lightweight static file JSON REST API.
 35 | 
 36 | PGXN::API provides a superset of the static file REST API, embellishing the
 37 | metadata in some files and providing additional APIs, including full-text
 38 | search and browsable access to all packages on the mirror. Hit the L for the canonical deployment of this module.
 40 | Better yet, read the L or use L if you
 42 | just want to use the API.
 43 | 
 44 | There are two simple steps to setting up your own API server using this
 45 | module:
 46 | 
 47 | =over
 48 | 
 49 | =item * L
 50 | 
 51 | This script syncs to a PGXN mirror via rsync and processes newly-synced data
 52 | to provide the additional data and APIs. Any PGXN mirror will do. If you need
 53 | to create your own network of mirrors first, see
 54 | L. Consult the
 55 | L documentation for details on its (minimal) options.
 56 | 
 57 | =item * L
 58 | 
 59 | A L server for the API. In addition to the usual L options, it
 60 | has a few of its own:
 61 | 
 62 | =over
 63 | 
 64 | =item C<--doc-root>
 65 | 
 66 | The path to use for the API document root. This is the same directory as you
 67 | manage via L in a cron job. Optional. If not specified, it will
 68 | default to a directory named F in the parent directory above the F
 69 | directory in which this module is installed. If you're running the API from a
 70 | Git checkout, that should be fine. Otherwise you should probably specify a
 71 | document root or you'll never be able to find it.
 72 | 
 73 | =item C<--errors-to>
 74 | 
 75 | An email address to which error emails should be sent. In the event of an
 76 | internal server error, the server will send an email to this address with
 77 | diagnostic information.
 78 | 
 79 | =item C<--errors-from>
 80 | 
 81 | An email address from which alert emails should be sent.
 82 | 
 83 | =back
 84 | 
 85 | =back
 86 | 
 87 | And that's it. If you're interested in the internals of PGXN::API or in
 88 | hacking on it, read on. Otherwise, just enjoy your own API server!
 89 | 
 90 | =head1 Interface
 91 | 
 92 | =head2 Constructor
 93 | 
 94 | =head3 C
 95 | 
 96 |   my $app = PGXN::API->instance;
 97 | 
 98 | Returns the singleton instance of PGXN::API. This is the recommended way
 99 | to get the PGXN::API object.
100 | 
101 | =head2 Class Method
102 | 
103 | =head3 C
104 | 
105 |   say 'PGXN::API ', PGXN::API->version_string;
106 | 
107 | Returns a string representation of the PGXN::API version.
108 | 
109 | =cut
110 | 
111 | sub version_string {
112 |     sprintf 'v%vd', $VERSION;
113 | }
114 | 
115 | =head2 Attributes
116 | 
117 | =head3 C
118 | 
119 |   my $templates = $pgxn->uri_templates;
120 | 
121 | Returns a hash reference of the URI templates for the various files stored in
122 | the API document root. The keys are the names of the templates, and the values
123 | are L objects. Includes the additional URI templates added by
124 | L.
125 | 
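For example, assuming the mirror's standard C<download> template and its
C<dist> and C<version> variables (an illustration only; the actual templates
come from the F<index.json> in the document root):

  my $uri = $pgxn->uri_templates->{download}->process(
      dist    => 'pair',
      version => '0.1.1',
  );
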
126 | =cut
127 | 
128 | has uri_templates => (is => 'ro', isa => 'HashRef', lazy => 1, default => sub {
129 |     my $self = shift;
130 |     my $tmpl = $self->read_json_from(
131 |         catfile $self->doc_root, 'index.json'
132 |     );
133 |     return { map { $_ => URI::Template->new($tmpl->{$_}) } keys %{ $tmpl } };
134 | });
135 | 
136 | =head3 C
137 | 
138 |   my $doc_root = $pgxn->doc_root;
139 | 
140 | Returns the document root for the API server. The default is the F
141 | directory in the root directory of this distribution.
142 | 
143 | =cut
144 | 
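# Trigger for doc_root: on first use, create the directory, copy in the
# default index.html, and pre-create the user, tag, dist, and extension
# subdirectories; die if the path exists but is not a directory.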
145 | my $trig = sub {
146 |     my ($self, $dir) = @_;
147 |      if (!-e $dir) {
148 |          require File::Path;
149 |          File::Path::make_path($dir);
150 | 
151 |          # Copy over the index.html.
152 |          require File::Copy::Recursive;
153 | 
154 |          (my $api_dir = __FILE__) =~ s{[.]pm$}{};
155 |          my $idx  = catfile $api_dir, 'index.html';
156 |          File::Copy::Recursive::fcopy($idx, $dir)
157 |              or die "Cannot copy $idx to $dir: $!\n";
158 | 
159 |          # Pre-generate the metadata directories.
160 |          File::Path::make_path(catdir $dir, $_)
161 |              for qw(user tag dist extension);
162 |      } elsif (!-d $dir) {
163 |          die qq{Location for document root "$dir" is not a directory\n};
164 |      }
165 | };
166 | 
167 | has doc_root => (is => 'rw', isa => 'Str', lazy => 1, trigger => $trig, default => sub {
168 |      my $file = quotemeta catfile qw(lib PGXN API.pm);
169 |      my $blib = quotemeta catfile 'blib', '';
170 |      (my $dir = __FILE__) =~ s{(?:$blib)?$file$}{www};
171 |      $trig->(shift, $dir);
172 |      $dir;
173 | });
174 | 
175 | =head3 C
176 | 
177 |   my $source_dir = $pgxn->source_dir;
178 | 
179 | Returns the directory on the file system where sources should be unzipped,
180 | which is just the F subdirectory of C.
181 | 
182 | =cut
183 | 
184 | has source_dir => (is => 'ro', 'isa' => 'Str', lazy => 1, default => sub {
185 |     my $dir = catdir shift->doc_root, 'src';
186 |     if (!-e $dir) {
187 |         require File::Path;
188 |         File::Path::make_path($dir);
189 |     } elsif (!-d $dir) {
190 |         die qq{Location for source files "$dir" is not a directory\n};
191 |     }
192 |     $dir;
193 | });
194 | 
195 | =head3 C
196 | 
197 |   my $mirror_root = $pgxn->mirror_root;
198 | 
199 | Returns the directory on the file system where the PGXN mirror lives, which is
200 | just the F subdirectory of C.
201 | 
202 | =cut
203 | 
204 | has mirror_root => (is => 'rw', 'isa' => 'Str', lazy => 1, default => sub {
205 |     my $dir = catdir shift->doc_root, 'mirror';
206 |     if (!-e $dir) {
207 |         require File::Path;
208 |         File::Path::make_path($dir);
209 |     } elsif (!-d $dir) {
210 |         die qq{Location for mirror files "$dir" is not a directory\n};
211 |     }
212 |     $dir;
213 | });
214 | 
215 | =head3 C
216 | 
217 |   my $data = $pgxn->read_json_from($filename);
218 | 
219 | Loads the contents of C<$filename>, parses them as JSON, and returns the
220 | resulting data structure.
221 | 
222 | =cut
223 | 
224 | sub read_json_from {
225 |     my ($self, $fn) = @_;
226 |     open my $fh, '<:raw', $fn or die "Cannot open $fn: $!\n";
227 |     local $/;
228 |     return JSON->new->utf8->decode(<$fh>);
229 | }
230 | 
231 | =head3 C
232 | 
233 |   $pgxn->write_json_to($filename, $data);
234 | 
235 | Writes C<$data> to C<$filename> as JSON.
236 | 
237 | =cut
238 | 
239 | sub write_json_to {
240 |     my ($self, $fn, $data) = @_;
241 |     my $encoder = JSON->new->space_after->allow_nonref->indent->canonical;
242 |     open my $fh, '>:utf8', $fn or die "Cannot open $fn: $!\n";
243 |     print $fh $encoder->encode($data);
244 |     close $fh or die "Cannot close $fn: $!\n";
245 | }
246 | 
247 | __PACKAGE__->meta->make_immutable;
248 | 
249 | 1;
250 | 
251 | __END__
252 | 
253 | =head1 Support
254 | 
255 | This module is stored in an open L. Feel free to fork and
257 | contribute!
258 | 
259 | Please file bug reports via L or by sending mail to
261 | L.
262 | 
263 | =head1 See Also
264 | 
265 | =over
266 | 
267 | =item L
268 | 
269 | The heart of any PGXN network, PGXN::Manager manages distribution uploads and
270 | mirror maintenance. You'll want to look at it if you plan to build your own
271 | network.
272 | 
273 | =item L
274 | 
275 | Comprehensive documentation of the APIs provided by both mirror servers and
276 | API servers powered by PGXN::API.
277 | 
278 | =item L
279 | 
280 | A Perl interface over a PGXN mirror or API. Able to read the mirror or API via
281 | HTTP or from the local file system.
282 | 
283 | =item L
284 | 
285 | A layer over the PGXN API providing a nicely-formatted Web site for folks to
286 | perform full text searches, read documentation, or browse information about
287 | users, distributions, tags, and extensions.
288 | 
289 | =item L
290 | 
291 | The implementation for L.
292 | 
293 | =item L
294 | 
295 | Does the heavy lifting of processing distributions and indexing them for the
296 | API.
297 | 
298 | =item L
299 | 
300 | Interface for accessing the PGXN::API full text indexes. Used to do the work
301 | of the C API.
302 | 
303 | =back
304 | 
305 | =head1 Author
306 | 
307 | David E. Wheeler 
308 | 
309 | =head1 Copyright and License
310 | 
311 | Copyright (c) 2011-2024 David E. Wheeler.
312 | 
313 | This module is free software; you can redistribute it and/or modify it under
314 | the L.
315 | 
316 | Permission to use, copy, modify, and distribute this software and its
317 | documentation for any purpose, without fee, and without a written agreement is
318 | hereby granted, provided that the above copyright notice and this paragraph
319 | and the following two paragraphs appear in all copies.
320 | 
321 | In no event shall David E. Wheeler be liable to any party for direct,
322 | indirect, special, incidental, or consequential damages, including lost
323 | profits, arising out of the use of this software and its documentation, even
324 | if David E. Wheeler has been advised of the possibility of such damage.
325 | 
326 | David E. Wheeler specifically disclaims any warranties, including, but not
327 | limited to, the implied warranties of merchantability and fitness for a
328 | particular purpose. The software provided hereunder is on an "as is" basis,
329 | and David E. Wheeler has no obligations to provide maintenance, support,
330 | updates, enhancements, or modifications.
331 | 
332 | =cut
333 | 


--------------------------------------------------------------------------------
/lib/PGXN/API/Router.pm:
--------------------------------------------------------------------------------
  1 | package PGXN::API::Router;
  2 | 
  3 | use v5.14;
  4 | use utf8;
  5 | use PGXN::API;
  6 | use Plack::Builder;
  7 | use Plack::App::File;
  8 | use Plack::App::Directory;
  9 | use PGXN::API::Searcher;
 10 | use List::MoreUtils qw(any);
 11 | use JSON;
 12 | use Plack::Request;
 13 | use Encode;
 14 | use File::Spec::Functions qw(catdir);
 15 | use namespace::autoclean;
 16 | our $VERSION = v0.20.3;
 17 | 
 18 | sub app {
 19 |     my ($class, %params) = @_;
 20 | 
 21 |     unless ($params{errors_to} && $params{errors_from}) {
 22 |         die "Missing required parameters errors_to and errors_from\n";
 23 |     }
 24 | 
 25 |     PGXN::API->instance->doc_root(delete $params{doc_root})
 26 |         if $params{doc_root};
 27 |     my $root = PGXN::API->instance->doc_root;
 28 | 
 29 |     # Identify distribution files as zip files.
 30 |     my ($zip_ext) = PGXN::API->instance->uri_templates->{download} =~ /([.][^.]+)$/;
 31 |     $Plack::MIME::MIME_TYPES->{$zip_ext} = $Plack::MIME::MIME_TYPES->{'.zip'};
 32 | 
 33 |     builder {
 34 |         enable 'ErrorDocument', 500, '/error', subrequest => 1;
 35 |         enable 'HTTPExceptions';
 36 |         enable 'StackTrace', no_print_errors => 1;
 37 |         enable 'JSONP', callback_key => 'callback';
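        # Append an X-PGXN-API-Version header to every response.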
 38 |         enable sub {
 39 |             my $app = shift;
 40 |             sub {
 41 |                 my $res = $app->(@_);
 42 |                 Plack::Util::response_cb($res, sub {
 43 |                     my $res = shift;
 44 |                     push @{ $res->[1] },
 45 |                         'X-PGXN-API-Version' => PGXN::API->version_string;
 46 |                 });
 47 |             };
 48 |         };
 49 | 
 50 |         # Serve most stuff as plain files.
 51 |         my $files = Plack::App::File->new(root => $root)->to_app;
 52 |         mount '/' => sub {
 53 |             my $env = shift;
 54 |             $env->{PATH_INFO} = '/index.html' if $env->{PATH_INFO} eq '/';
 55 |             $files->($env);
 56 |         };
 57 | 
 58 |         # Handle searches.
 59 |         my $searcher = PGXN::API::Searcher->new($root);
 60 |         mount '/search' => sub {
 61 |             my $req = Plack::Request->new(shift);
 62 | 
 63 |             # Make sure we have a valid request.
 64 |             local $1;
 65 |             return [
 66 |                 404,
 67 |                 ['Content-Type' => 'text/plain', 'Content-Length' => 9],
 68 |                 ['not found']
 69 |             ] if $req->path_info !~ m{^/((?:d(?:oc|ist)|extension|user|tag)s)/?$};
 70 |             my $in = $1;
 71 | 
 72 |             my $params = $req->query_parameters;
 73 |             my $q = $params->{q};
 74 |             return [
 75 |                 400,
 76 |                 ['Content-Type' => 'text/plain', 'Content-Length' => 48],
 77 |                 ['Bad request: Invalid or missing "q" query param.']
 78 |             ] if !defined $q || any { $q eq $_ } '', '*', '?';
 79 | 
 80 |             # Make sure "o" and "l" params are valid.
 81 |             for my $param (qw(o l)) {
 82 |                 my $val = $params->{$param};
 83 |                 return [
 84 |                     400,
 85 |                     ['Content-Type' => 'text/plain', 'Content-Length' => 37],
 86 |                     [qq{Bad request: invalid "$param" query param.}]
 87 |                 ] if $val && $val !~ /^\d+$/;
 88 |             }
 89 | 
 90 |             # Give 'em the results.
 91 |             my $json = encode_json $searcher->search(
 92 |                 in     => $in,
 93 |                 query  => decode_utf8($q),
 94 |                 offset => scalar $params->{o},
 95 |                 limit  => scalar $params->{l},
 96 |             );
 97 |             return [
 98 |                 200,
 99 |                 ['Content-Type' => 'application/json', 'Content-Length' => length $json ],
100 |                 [$json],
101 |             ]
102 |         };
103 | 
104 |         # For source browsing, some things should be text/plain.
105 |         my $mimes = { %{ $Plack::MIME::MIME_TYPES } };
106 |         for my $ext (keys %{ $mimes }) {
107 |             $mimes->{$ext} = 'text/plain'
108 |                 if $mimes->{$ext} =~ /html|x-c|xml|calendar|vcard/
109 |                 || any { $ext eq $_ } qw(.bat .css .eml .js .json .mime .swf);
110 |         }
111 |         my $src_dir = Plack::App::Directory->new(
112 |             root => catdir $root, 'src'
113 |         )->to_app;
114 | 
115 |         mount '/src' => sub {
116 |             local $Plack::MIME::MIME_TYPES = $mimes;
117 |             $src_dir->(shift)
118 |         };
119 | 
120 |         mount '/_index' => sub {
121 |             # Never allow access here.
122 |             return [
123 |                 404,
124 |                 ['Content-Type' => 'text/plain', 'Content-Length' => 9],
125 |                 ['not found']
126 |             ];
127 |         };
128 | 
129 |         mount '/error' => sub {
130 |             my $env = shift;
131 | 
132 |             # Pull together the original request environment.
133 |             my $err_env = { map {
134 |                 my $k = $_;
135 |                 s/^psgix[.]errordocument[.]//
136 |                     ? /plack[.]stacktrace[.]/ ? () : ($_ => $env->{$k} )
137 |                     : ();
138 |             } keys %{ $env } };
139 |             my $uri = Plack::Request->new($err_env)->uri;
140 | 
141 |             if (%{ $err_env }) {
142 |                 # Send an email to the administrator.
143 |                 # XXX Need configuration.
144 |                 require Email::MIME;
145 |                 require Email::Sender::Simple;
146 |                 require Data::Dump;
147 |                 my $email = Email::MIME->create(
148 |                     header     => [
149 |                         From    => $params{errors_from},
150 |                         To      => $params{errors_to},
151 |                         Subject => 'PGXN API Internal Server Error',
152 |                     ],
153 |                     attributes => {
154 |                         content_type => 'text/plain',
155 |                         charset      => 'UTF-8',
156 |                     },
157 |                     body    => "An error occurred during a request to $uri.\n\n"
158 |                              . "Environment:\n\n" . Data::Dump::pp($err_env)
159 |                              . "\n\nTrace:\n\n"
160 |                              . ($env->{'plack.stacktrace.text'} || 'None found. :-(')
161 |                              . "\n",
162 |                 );
163 |                 Email::Sender::Simple->send($email);
164 |             }
165 | 
166 |             return [
167 |                 200, # Only handled by ErrorDocument, which keeps 500.
168 |                 ['Content-Type' => 'text/plain', 'Content-Length' => 21],
169 |                 ['internal server error']
170 |             ];
171 |         };
172 | 
173 |     };
174 | }
175 | 
176 | 1;
177 | 
178 | =head1 Name
179 | 
180 | PGXN::API::Router - The PGXN::API request router.
181 | 
182 | =head1 Synopsis
183 | 
184 |   # In app.psgi
185 |   use PGXN::API::Router;
186 |   PGXN::API::Router->app;
187 | 
188 | =head1 Description
189 | 
190 | This class defines the HTTP request routing table used by PGXN::API. Unless
191 | you're modifying the PGXN::API routes, you won't have to worry about it. Just
192 | know that this is the class that Plack uses to fire up the app.
193 | 
194 | =head1 Interface
195 | 
196 | =head2 Class Methods
197 | 
198 | =head3 C<app>
199 | 
200 |   PGXN::API::Router->app(%params);
201 | 
202 | Returns the PGXN::API Plack app. The supported parameters are:
203 | 
204 | =over
205 | 
206 | =item C<doc_root>
207 | 
208 | The path to use for the API document root.
209 | 
210 | =item C<errors_to>
211 | 
212 | An email address to which error emails should be sent.
213 | 
214 | =item C<errors_from>
215 | 
216 | An email address from which alert emails should be sent.
217 | 
218 | =back
219 | 
220 | See L<pgxn_api_server> for an example usage.
221 | 
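A minimal F<app.psgi> along these lines should work (the document root and
addresses below are placeholders, not recommended values):

  use PGXN::API::Router;
  PGXN::API::Router->app(
      doc_root    => '/var/pgxn/api',
      errors_to   => 'alerts@example.com',
      errors_from => 'api@example.com',
  );
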
222 | =head1 Author
223 | 
224 | David E. Wheeler <david@justatheory.com>
225 | 
226 | =head1 Copyright and License
227 | 
228 | Copyright (c) 2011-2024 David E. Wheeler.
229 | 
230 | This module is free software; you can redistribute it and/or modify it under
231 | the L<PostgreSQL License|https://www.opensource.org/licenses/postgresql>.
232 | 
233 | Permission to use, copy, modify, and distribute this software and its
234 | documentation for any purpose, without fee, and without a written agreement is
235 | hereby granted, provided that the above copyright notice and this paragraph
236 | and the following two paragraphs appear in all copies.
237 | 
238 | In no event shall David E. Wheeler be liable to any party for direct,
239 | indirect, special, incidental, or consequential damages, including lost
240 | profits, arising out of the use of this software and its documentation, even
241 | if David E. Wheeler has been advised of the possibility of such damage.
242 | 
243 | David E. Wheeler specifically disclaims any warranties, including, but not
244 | limited to, the implied warranties of merchantability and fitness for a
245 | particular purpose. The software provided hereunder is on an "as is" basis,
246 | and David E. Wheeler has no obligations to provide maintenance, support,
247 | updates, enhancements, or modifications.
248 | 
249 | =cut
250 | 


--------------------------------------------------------------------------------
/lib/PGXN/API/Sync.pm:
--------------------------------------------------------------------------------
  1 | package PGXN::API::Sync;
  2 | 
  3 | use v5.14;
  4 | use utf8;
  5 | use Moose;
  6 | use PGXN::API;
  7 | use PGXN::API::Indexer;
  8 | use Digest::SHA1;
  9 | use List::Util qw(first);
 10 | use File::Spec::Functions qw(catfile path rel2abs tmpdir catdir);
 11 | use File::Path qw(make_path);
 12 | use Cwd;
 13 | use Archive::Zip qw(:ERROR_CODES);
 14 | use constant WIN32 => $^O eq 'MSWin32';
 15 | use Moose::Util::TypeConstraints;
 16 | use namespace::autoclean;
 17 | our $VERSION = v0.20.3;
 18 | 
 19 | subtype Executable => as 'Str', where {
 20 |     my $exe = $_;
 21 |     first { -f $_ && -x _ } $exe, map { catfile $_, $exe } path;
 22 | };
 23 | 
 24 | has rsync_path   => (is => 'rw', isa => 'Executable', default => 'rsync', required => 1);
 25 | has source       => (is => 'rw', isa => 'Str', required => 1);
 26 | has verbose      => (is => 'rw', isa => 'Int', default => 0);
 27 | has log_file     => (is => 'rw', isa => 'Str', required => 1, default => sub {
 28 |     catfile tmpdir, "pgxn-api-sync-$$.txt"
 29 | });
 30 | has mirror_uri_templates => (is => 'ro', isa => 'HashRef', lazy => 1, default => sub {
 31 |     my $self = shift;
 32 |     my $api  = PGXN::API->instance;
 33 |     my $tmpl = $api->read_json_from(catfile $api->mirror_root, 'index.json');
 34 |     return { map { $_ => URI::Template->new($tmpl->{$_}) } keys %{ $tmpl } };
 35 | });
 36 | 
 37 | sub run {
 38 |     my $self = shift;
 39 |     $self->run_rsync;
 40 |     $self->update_index;
 41 | }
 42 | 
 43 | sub DESTROY {
 44 |     my $self = shift;
 45 |     unlink $self->log_file;
 46 |     $self->SUPER::DESTROY;
 47 | }
 48 | 
 49 | sub run_rsync {
 50 |     my $self = shift;
 51 | 
 52 |     # Sync the mirror.
 53 |     say "Updating the mirror from ", $self->source if $self->verbose;
 54 |     system (
 55 |         $self->rsync_path,
 56 |         qw(--archive --compress --delete --quiet),
 57 |         '--log-file-format' => '%i %n',
 58 |         '--log-file'        => $self->log_file,
 59 |         $self->source,
 60 |         PGXN::API->instance->mirror_root,
 61 |     ) == 0 or die;
 62 | }
 63 | 
 64 | sub update_index {
 65 |     my $self    = shift;
 66 |     my $indexer = PGXN::API::Indexer->new(verbose => $self->verbose);
 67 | 
 68 |     my $meta_re = $self->regex_for_uri_template('meta');
 69 |     my $mirr_re = $self->regex_for_uri_template('mirrors');
 70 |     my $spec_re = $self->regex_for_uri_template('spec');
 71 |     my $stat_re = $self->regex_for_uri_template('stats');
 72 |     my $user_re = $self->regex_for_uri_template('user');
 73 |     my $log     = $self->log_file;
 74 | 
 75 |     say 'Parsing the rsync log file' if $self->verbose > 1;
 76 |     open my $fh, '<:encoding(UTF-8)', $log or die "Cannot open $log: $!\n";
 77 |     while (my $line = <$fh>) {
 78 |         if ($line =~ $meta_re) {
 79 |             if (my $params = $self->validate_distribution($1)) {
 80 |                 $indexer->add_distribution($params);
 81 |             }
 82 |         }
 83 |         elsif ($line =~ $stat_re || $line =~ $mirr_re) {
 84 |             $indexer->copy_from_mirror($1);
 85 |         }
 86 |         elsif ($line =~ $spec_re) {
 87 |             my $path = $1;
 88 |             $indexer->copy_from_mirror($path);
 89 |             $indexer->parse_from_mirror($path, 'Multimarkdown');
 90 |         }
 91 |         elsif ($line =~ /\s>f(?:[+]+|(?:c|.s|..t)[^ ]+)\sindex[.]json$/) {
 92 |             # Always update the index JSON if it's mentioned.
 93 |             $indexer->update_root_json;
 94 |         }
 95 |         elsif ($line =~ $user_re) {
 96 |             $indexer->merge_user($2);
 97 |         }
 98 |     }
 99 |     close $fh or die "Cannot close $log: $!\n";
100 |     $indexer->finalize;
101 |     say 'Sync complete' if $self->verbose;
102 |     return $self;
103 | }
104 | 
105 | sub regex_for_uri_template {
106 |     my ($self, $name) = @_;
107 | 
108 |     # Get the URI for the template.
109 |     my $uri = $self->mirror_uri_templates->{$name}->process(
110 |         map { $_ => "{$_}" } qw(dist version user extension tag stats format)
111 |     );
112 | 
113 |     my %regex_for = (
114 |         '{dist}'      => qr{[^/]+?},
115 |         # https://regex101.com/r/vkijKf/
116 |         '{version}'   => qr{(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?},
117 |         '{user}'      => qr{([a-z]([-a-z0-9]{0,61}[a-z0-9])?)}i,
118 |         '{extension}' => qr{[^/]+?},
119 |         '{tag}'       => qr{[^/]+?},
120 |         '{stats}'     => qr{(?:dist|tag|user|extension|summary)},
121 |         '{format}'    => qr{(?:txt|html|atom|xml)},
122 |     );
123 | 
124 |     # Assemble the regex corresponding to the template.
125 |     my $regex = join '', map {
126 |         $regex_for{$_} || quotemeta $_
127 |     } grep { defined && length } map {
128 |         split /(\{.+?\})/
129 |     } catfile grep { defined && length } $uri->path_segments;
130 | 
131 |     # Return the regex to match new or updated in rsync output lines.
132 |     return qr{\s>f(?:[+]+|(?:c|.s|..t)[^ ]+)\s($regex)$};
133 | 
134 |     # The rsync %i output format:
135 |     # YXcstpogz    # Snow Leopard
136 |     # YXcstpoguax  # Debian
137 |     # c: checksum has changed, file will be updated
138 |     # s: file size has changed, file will be updated
139 |     # t: modtime has changed, file will be updated
140 |     # +++++++: New item
141 | }
142 | 
143 | 
144 | sub validate_distribution {
145 |     my ($self, $fn) = shift->_rel_to_mirror(@_);
146 |     my $meta     = PGXN::API->instance->read_json_from($fn);
147 |     my $zip_path = $self->download_for($meta);
148 | 
149 |     # Validate it against the SHA1 checksum.
150 |     say '  Checksumming ', $zip_path if $self->verbose;
151 |     if ($self->digest_for($zip_path) ne $meta->{sha1}) {
152 |         warn "Checksum verification failed for $fn\n";
153 |         return;
154 |     }
155 | 
156 |     # Unpack the distribution.
157 |     my $zip = $self->unzip($zip_path, $meta) or return;
158 |     return { meta => $meta, zip => $zip };
159 | }
160 | 
161 | sub download_for {
162 |     my ($self, $meta) = @_;
163 |     my $zip_uri = $self->mirror_uri_templates->{download}->process(
164 |         dist    => lc $meta->{name},
165 |         version => lc $meta->{version},
166 |     );
167 | 
168 |     my (undef, @segments) = $zip_uri->path_segments;
169 |     return catfile @segments;
170 | }
171 | 
172 | sub digest_for {
173 |     my ($self, $fn) = shift->_rel_to_mirror(@_);
174 |     open my $fh, '<:raw', $fn or die "Cannot open $fn: $!\n";
175 |     my $sha1 = Digest::SHA1->new;
176 |     $sha1->addfile($fh);
177 |     return $sha1->hexdigest;
178 | }
179 | 
180 | sub unzip {
181 |     say '  Extracting ', $_[1] if $_[0]->verbose;
182 |     my ($self, $zip_path, $meta) = shift->_rel_to_mirror(@_);
183 | 
184 |     my $zip = Archive::Zip->new;
185 |     if ($zip->read(rel2abs $zip_path) != AZ_OK) {
186 |         warn "Error reading $zip_path\n";
187 |         return;
188 |     }
189 | 
190 |     my $dist_dir = catdir(
191 |         PGXN::API->instance->source_dir,
192 |         lc $meta->{name}
193 |     );
194 |     make_path $dist_dir unless -e $dist_dir && -d _;
195 | 
196 |     foreach my $member ($zip->members) {
197 |         my $fn = catfile $dist_dir, split m{/} => $member->fileName;
198 |         say "    $fn\n" if $self->verbose > 2;
199 | 
200 |         if ($member->isSymbolicLink) {
201 |             # Delete exsting so Archive::Zip won't fail to create it.
202 |             warn "Cannot unlink $fn: $!\n" if -e $fn && !unlink $fn;
203 |         } else {
204 |             # Make sure the member is readable by everyone.
205 |             $member->unixFileAttributes( $member->isDirectory ? 0755 : 0644 );
206 |         }
207 | 
208 |         if ($member->extractToFileNamed($fn) != AZ_OK) {
209 |             warn "Error extracting $fn from $zip_path\n";
210 |             next;
211 |         }
212 |     }
213 | 
214 |     return $zip;
215 | }
216 | 
217 | sub _rel_to_mirror {
218 |     return shift, catfile(+PGXN::API->instance->mirror_root, shift), @_;
219 | }
220 | 
221 | __PACKAGE__->meta->make_immutable(inline_destructor => 0);
222 | 
223 | 1;
224 | 
225 | __END__
226 | 
227 | =head1 Name
228 | 
229 | PGXN::API::Sync - Sync from a PGXN mirror and update the index
230 | 
231 | =head1 Synopsis
232 | 
233 |   use PGXN::API::Sync;
234 |   PGXN::API::Sync->new(
235 |       source     => $source,
236 |       rsync_path => $rsync_path,
237 |       verbose    => $verbose,
238 |   )->run;
239 | 
240 | =head1 Description
241 | 
242 | This module provides the implementation for L<pgxn_api_sync>, the command-line
243 | utility for syncing to a PGXN mirror and creating the API. It syncs to the
244 | specified PGXN rsync source URL, which should be a PGXN mirror server, and
245 | then verifies and unpacks newly-uploaded distributions and hands them off to
246 | L<PGXN::API::Indexer> to index.
247 | 
248 | =head1 Class Interface
249 | 
250 | =head2 Constructor
251 | 
252 | =head3 C<new>
253 | 
254 |   my $sync = PGXN::API::Sync->new(%params);
255 | 
256 | Creates and returns a new PGXN::API::Sync object. The supported parameters
257 | are:
258 | 
259 | =over
260 | 
261 | =item C<rsync_path>
262 | 
263 | Path to the rsync executable. Defaults to C<rsync>, which should work fine if
264 | there is an executable with that name in your path.
265 | 
266 | =item C<source>
267 | 
268 | An C<rsync> URL specifying the source from which to sync. The source should be
269 | an C<rsync> server serving up a PGXN mirror source as created or mirrored from
270 | a PGXN Manager server.
271 | 
272 | =item C<verbose>
273 | 
274 | An incremental integer specifying the level of verbosity to use during a sync.
275 | By default, PGXN::API::Sync runs in quiet mode, where only errors are emitted
276 | to C<STDERR>.
277 | 
278 | =back
279 | 
280 | =head1 Instance Interface
281 | 
282 | =head2 Instance Methods
283 | 
284 | =head3 C<run>
285 | 
286 |   $sync->run;
287 | 
288 | Runs the sync, C<rsync>ing from the source mirror server, verifying and
289 | unpacking distributions, and handing them off to the indexer for indexing.
290 | This is the main method called by L<pgxn_api_sync> to just do the job.
291 | 
292 | =head3 C<run_rsync>
293 | 
294 |   $sync->run_rsync;
295 | 
296 | C<rsync>s from the source mirror server. Called by C<run>.
297 | 
298 | =head3 C<update_index>
299 | 
300 |   $sync->update_index;
301 | 
302 | Parses the log generated by the execution of C<run_rsync> for new
303 | distribution F<META.json> files and passes any found off to
304 | C<validate_distribution> and L<PGXN::API::Indexer> for
305 | validation, unpacking, and indexing. Called internally by C<run>.
306 | 
307 | =head3 C<regex_for_uri_template>
308 | 
309 |   my $regex = $sync->regex_for_uri_template('download');
310 | 
311 | Returns a regular expression that will match the path to a file in the rsync
312 | logs. The regular expression is created from a named URI template as loaded
313 | from the F<index.json> file synced from the mirror server. Used internally to
314 | parse the paths to distribution files from the rsync logs so that they can be
315 | validated, unpacked, and indexed.
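
As a rough sketch of how this is used internally (the log line is modeled on
the sample output in F<t/data/rsync.out>, not captured from a live sync):

  my $regex = $sync->regex_for_uri_template('meta');
  my $line  = '2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.0/META.json';
  if ($line =~ $regex) {
      # $1 holds the mirror-relative path to the new META.json file.
  }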
316 | 
317 | =head3 C<download_for>
318 | 
319 |   my $download = $sync->download_for($meta);
320 | 
321 | Given the metadata loaded from a mirror server F<META.json> file, returns the
322 | path to the download file for the distribution. Used internally by
323 | C<validate_distribution> to find the file to validate.
324 | 
325 | =head3 C<validate_distribution>
326 | 
327 |   my $params = $sync->validate_distribution($path_to_dist_meta);
328 | 
329 | Given the path to a distribution F<META.json> file, this method validates the
330 | digest for the download file and unpacks it. Returns parameters suitable for
331 | passing to L<PGXN::API::Indexer> for indexing.
332 | 
333 | =head3 C<digest_for>
334 | 
335 |   my $digest = $sync->digest_for($zipfile);
336 | 
337 | Returns the SHA-1 hex digest for a distribution file (or any file, really).
338 | Called by C<validate_distribution>.
339 | 
340 | =head3 C<unzip>
341 | 
342 |   $sync->unzip($download, $meta);
343 | 
344 | Given a download file for a distribution, and the metadata loaded from the
345 | C<META.json> describing the download, this method unpacks the download under
346 | the F<src> directory under the document root. Each file will be readable by
347 | all users. This provides the browsable file interface for the API server to
348 | serve. Called internally by C.
349 | 
350 | =head2 Instance Accessors
351 | 
352 | =head3 C<rsync_path>
353 | 
354 |   my $rsync_path = $sync->rsync_path;
355 |   $sync->rsync_path($rsync_path);
356 | 
357 | Get or set the path to the C<rsync> executable.
358 | 
359 | =head3 C<source>
360 | 
361 |   my $source = $sync->source;
362 |   $sync->source($source);
363 | 
364 | Get or set the source C<rsync> URL from which to sync a PGXN mirror.
365 | 
366 | =head3 C<verbose>
367 | 
368 |   my $verbose = $sync->verbose;
369 |   $sync->verbose($verbose);
370 | 
371 | Get or set an incremental verbosity. The higher the integer specified, the
372 | more verbose the sync.
373 | 
374 | =head3 C<log_file>
375 | 
376 |   my $log_file = $sync->log_file;
377 |   $sync->log_file($log_file);
378 | 
379 | Get or set the path to use for the C<rsync> log file. This file will then be
380 | parsed by C<update_index> for new distributions to index.
381 | 
382 | =head3 C<mirror_uri_templates>
383 | 
384 |   my $templates = $pgxn->mirror_uri_templates;
385 | 
386 | Returns a hash reference of the URI templates loaded from the F<index.json>
387 | file in the mirror root. The keys are the names of the templates, and the
388 | values are L<URI::Template> objects.
389 | 
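For example, assuming the mirror's F<index.json> defines the usual C<download>
template, it can be expanded into a mirror-relative path like so (the values
here are illustrative):

  my $uri = $sync->mirror_uri_templates->{download}->process(
      dist    => 'pair',
      version => '0.1.0',
  );
  # $uri stringifies to something like "/dist/pair/0.1.0/pair-0.1.0.zip".
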
390 | =cut
391 | 
392 | =head1 Author
393 | 
394 | David E. Wheeler <david@justatheory.com>
395 | 
396 | =head1 Copyright and License
397 | 
398 | Copyright (c) 2011-2024 David E. Wheeler.
399 | 
400 | This module is free software; you can redistribute it and/or modify it under
401 | the L<PostgreSQL License|https://www.opensource.org/licenses/postgresql>.
402 | 
403 | Permission to use, copy, modify, and distribute this software and its
404 | documentation for any purpose, without fee, and without a written agreement is
405 | hereby granted, provided that the above copyright notice and this paragraph
406 | and the following two paragraphs appear in all copies.
407 | 
408 | In no event shall David E. Wheeler be liable to any party for direct,
409 | indirect, special, incidental, or consequential damages, including lost
410 | profits, arising out of the use of this software and its documentation, even
411 | if David E. Wheeler has been advised of the possibility of such damage.
412 | 
413 | David E. Wheeler specifically disclaims any warranties, including, but not
414 | limited to, the implied warranties of merchantability and fitness for a
415 | particular purpose. The software provided hereunder is on an "as is" basis,
416 | and David E. Wheeler has no obligations to provide maintenance, support,
417 | updates, enhancements, or modifications.
418 | 
419 | =cut
420 | 


--------------------------------------------------------------------------------
/lib/PGXN/API/index.html:
--------------------------------------------------------------------------------
 1 | 
 3 | 
 4 |   
 5 |     PostgreSQL Extension Network API
 6 |   
 7 |   
 8 |     

PostgreSQL Extension Network API

9 |

This is the PGXN API server. You probably 10 | want PGXN. Or if you want to upload a 11 | distribution to PGXN, use PGXN 12 | Manager. If you actually want to use the PGXN API, you might 13 | want to have a look at the 14 | documentation.

15 | 16 | 17 | -------------------------------------------------------------------------------- /t/base.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use warnings; 5 | use File::Spec::Functions qw(catdir catfile); 6 | use File::Path qw(remove_tree); 7 | use Test::File; 8 | use Test::More tests => 37; 9 | #use Test::More 'no_plan'; 10 | use Test::File::Contents; 11 | use File::Copy::Recursive qw(fcopy); 12 | use File::Temp; 13 | use JSON; 14 | use Cwd; 15 | 16 | my $CLASS; 17 | BEGIN { 18 | $CLASS = 'PGXN::API'; 19 | use_ok $CLASS or die; 20 | } 21 | 22 | can_ok $CLASS => qw( 23 | instance 24 | uri_templates 25 | source_dir 26 | read_json_from 27 | ); 28 | 29 | isa_ok my $pgxn = $CLASS->instance, $CLASS; 30 | is +$CLASS->instance, $pgxn, 'instance() should return a singleton'; 31 | is +$CLASS->instance, $pgxn, 'new() should return a singleton'; 32 | 33 | ############################################################################## 34 | # Test read_json_from() 35 | my $file = catfile qw(t root tag pair.json); 36 | open my $fh, '<:raw', $file or die "Cannot open $file: $!\n"; 37 | my $data = do { 38 | local $/; 39 | decode_json <$fh>; 40 | }; 41 | close $fh; 42 | is_deeply $pgxn->read_json_from($file), $data, 43 | 'read_json_from() should work'; 44 | 45 | ############################################################################## 46 | # Test write_json_to() 47 | my $tmpfile = 'tmp.json'; 48 | END { unlink $tmpfile } 49 | ok $pgxn->write_json_to($tmpfile => $data), 'Write JSON'; 50 | is_deeply $pgxn->read_json_from($tmpfile), $data, 51 | 'It should read back in properly'; 52 | 53 | # Test doc_root(). 54 | my $doc_root = catdir 't', 'test_base_root'; 55 | file_not_exists_ok $doc_root, 'Doc root should not yet exist'; 56 | $pgxn->doc_root($doc_root); 57 | END { remove_tree $doc_root } 58 | is $pgxn->doc_root, $doc_root, 'Should have doc root'; 59 | file_exists_ok $doc_root, 'Doc root should now exist'; 60 | file_exists_ok( 61 | catdir($doc_root, $_), 62 | "Subdiretory $_ should have been created" 63 | ) for qw(user tag dist extension); 64 | 65 | # Make sure index.html was created. 66 | file_exists_ok catfile($doc_root, 'index.html'), 'index.html should exist'; 67 | files_eq_or_diff( 68 | catfile($doc_root, 'index.html'), 69 | catfile(qw(lib PGXN API index.html)), 70 | 'And it should be the lib copy' 71 | ); 72 | 73 | # Test source_dir(). 74 | my $src_dir = catdir $pgxn->doc_root, 'src'; 75 | file_not_exists_ok $src_dir, 'Source dir should not yet exist'; 76 | is $pgxn->source_dir, $src_dir, 'Should have expected source directory'; 77 | file_exists_ok $src_dir, 'Source dir should now exist'; 78 | ok -d $src_dir, 'Source dir should be a directory'; 79 | 80 | # Test mirror_root(). 81 | my $mirror_root = catdir $pgxn->doc_root, 'mirror'; 82 | file_not_exists_ok $mirror_root, 'Mirror dir should not yet exist'; 83 | is $pgxn->mirror_root, $mirror_root, 'Should have expected source directory'; 84 | file_exists_ok $mirror_root, 'Mirror dir should now exist'; 85 | ok -d $mirror_root, 'Mirror dir should be a directory'; 86 | 87 | # Make sure the URI templates are created. 
88 | fcopy catfile(qw(t root index.json)), $doc_root; 89 | ok my $tmpl = $pgxn->uri_templates, 'Get URI templates'; 90 | isa_ok $tmpl, 'HASH', 'Their storage'; 91 | isa_ok $tmpl->{$_}, 'URI::Template', "Template $_" for keys %{ $tmpl }; 92 | -------------------------------------------------------------------------------- /t/bin/testrsync: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl -w 2 | 3 | open my $fh, '>', 'test.tmp' or die "Cannot open test.tmp: $!\n"; 4 | print $fh $_, $/ for @ARGV; 5 | -------------------------------------------------------------------------------- /t/bin/testrsync.bat: -------------------------------------------------------------------------------- 1 | @rem = '--*-Perl-*-- 2 | @echo off 3 | if "%OS%" == "Windows_NT" goto WinNT 4 | perl -x -S "%0" %1 %2 %3 %4 %5 %6 %7 %8 %9 5 | goto endofperl 6 | :WinNT 7 | perl -x -S %0 %* 8 | if NOT "%COMSPEC%" == "%SystemRoot%\system32\cmd.exe" goto endofperl 9 | if %errorlevel% == 9009 echo You do not have Perl in your PATH. 10 | if errorlevel 1 goto script_failed_so_exit_with_non_zero_val 2>nul 11 | goto endofperl 12 | @rem '; 13 | #!/usr/bin/perl -w 14 | 15 | open my $fh, '>', 'test.tmp' or die "Cannot open test.tmp: $!\n"; 16 | print $fh $_, $/ for @ARGV; 17 | 18 | __END__ 19 | :endofperl 20 | -------------------------------------------------------------------------------- /t/data/kv-tag-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "key value", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.2", "date": "2010-11-03T06:23:28Z"}, 7 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 8 | ], 9 | "testing": [ 10 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 11 | ] 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /t/data/ordered-tag-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "ordered pair", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.2", "date": "2010-11-03T06:23:28Z"}, 7 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 8 | ], 9 | "testing": [ 10 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 11 | ] 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /t/data/pair-ext-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pair", 3 | "latest": "testing", 4 | "stable": { "dist": "pair", "version": "0.1.0", "sha1": "1234567890abcdef1234567890abcdef12345678" }, 5 | "testing": { "dist": "pair", "version": "0.1.1", "sha1": "c552c961400253e852250c5d2f3def183c81adb3" }, 6 | "versions": { 7 | "0.1.1": [ 8 | { "dist": "pair", "version": "0.1.1" } 9 | ], 10 | "0.1.0": [ 11 | { "dist": "pair", "version": "0.1.0" } 12 | ] 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /t/data/pair-ext-updated2.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pair", 3 | "latest": "testing", 4 | "stable": { "dist": "pair", "version": "0.1.2", "sha1": "cebefd23151b4b797239646f7ae045b03d028fcf" }, 5 | "testing": { "dist": "pair", "version": "0.1.1", "sha1": "c552c961400253e852250c5d2f3def183c81adb3" }, 6 | "versions": { 7 | "0.1.1": [ 8 | { "dist": "otherdist", "version": "0.3.0" }, 9 | { "dist": "pair", "version": 
"0.1.1" } 10 | ], 11 | "0.1.0": [ 12 | { "dist": "pair", "version": "0.1.0" } 13 | ] 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /t/data/pair-ext-updated3.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pair", 3 | "latest": "stable", 4 | "stable": { "dist": "pair", "version": "0.1.2", "sha1": "cebefd23151b4b797239646f7ae045b03d028fcf" }, 5 | "testing": { "dist": "pair", "version": "0.1.1", "sha1": "c552c961400253e852250c5d2f3def183c81adb3" }, 6 | "versions": { 7 | "0.1.2": [ 8 | { "dist": "pair", "version": "0.1.2" } 9 | ], 10 | "0.1.1": [ 11 | { "dist": "otherdist", "version": "0.3.0" }, 12 | { "dist": "pair", "version": "0.1.1" } 13 | ], 14 | "0.1.0": [ 15 | { "dist": "pair", "version": "0.1.0" } 16 | ] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /t/data/pair-ext-updated4.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pair", 3 | "latest": "stable", 4 | "stable": { "dist": "pair", "version": "0.1.3", "sha1": "cebefd23151b4b797239646f7ae045b03d028fcf" }, 5 | "testing": { "dist": "pair", "version": "0.1.1", "sha1": "c552c961400253e852250c5d2f3def183c81adb3" }, 6 | "versions": { 7 | "0.1.3": [ 8 | { "dist": "pair", "version": "0.1.2" } 9 | ], 10 | "0.1.1": [ 11 | { "dist": "otherdist", "version": "0.3.0" }, 12 | { "dist": "pair", "version": "0.1.1" } 13 | ], 14 | "0.1.0": [ 15 | { "dist": "pair", "version": "0.1.0" } 16 | ] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /t/data/pair-tag-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "pair", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 7 | ], 8 | "testing": [ 9 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 10 | ] 11 | }, 12 | "pgTAP": { 13 | "stable": [ 14 | {"version": "0.25.0", "date": "2011-01-22T08:34:51Z"} 15 | ] 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /t/data/pair-tag-updated2.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "pair", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.2", "date": "2010-11-03T06:23:28Z"}, 7 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 8 | ], 9 | "testing": [ 10 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 11 | ] 12 | }, 13 | "pgTAP": { 14 | "stable": [ 15 | {"version": "0.25.0", "date": "2011-01-22T08:34:51Z"} 16 | ] 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /t/data/pair-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pair", 3 | "releases": { 4 | "stable": [ 5 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"}, 6 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 7 | ], 8 | "testing": [ 9 | {"version": "0.1.2", "date": "2010-12-13T23:12:41Z"} 10 | ] 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /t/data/pair-updated2.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "pair", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.2", "date": "2010-11-03T06:23:28Z"}, 7 | 
{"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 8 | ], 9 | "testing": [ 10 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 11 | ] 12 | }, 13 | "pgTAP": { 14 | "stable": [ 15 | {"version": "0.25.0", "date": "2011-01-22T08:34:51Z"} 16 | ] 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /t/data/rsync.out: -------------------------------------------------------------------------------- 1 | 2011/03/01 15:55:18 [49405] receiving file list 2 | 2011/03/01 15:55:18 [49406] cd+++++++ ./ 3 | 2011/03/01 15:55:18 [49406] >f+++++++ README 4 | 2011/03/01 15:55:18 [49406] >f+++++++ index.html 5 | 2011/03/01 15:55:18 [49406] >f+++++++ index.json 6 | 2011/03/01 15:55:18 [49406] cd+++++++ 7 | 2011/03/01 15:55:18 [49406] cd+++++++ dist/ 8 | 2011/03/01 15:55:18 [49406] >f+++++++ dist/pair.json 9 | 2011/03/01 15:55:18 [49406] >f+++++++ dist/pg_french_datatypes.json 10 | 2011/03/01 15:55:18 [49406] >f+++++++ dist/tinyint.json 11 | 2011/03/01 15:55:18 [49406] cd+++++++ extension/ 12 | 2011/03/01 15:55:18 [49406] >f+++++++ extension/pair.json 13 | 2011/03/01 15:55:18 [49406] >f+++++++ extension/pg_french_datatypes.json 14 | 2011/03/01 15:55:18 [49406] >f+++++++ extension/tinyint.json 15 | 2011/03/01 15:55:18 [49406] cd+++++++ user/ 16 | 2011/03/01 15:55:18 [49406] >f+++++++ user/daamien.json 17 | 2011/03/01 15:55:18 [49406] >f+++++++ user/theory.json 18 | 2011/03/01 15:55:18 [49406] >f+++++++ user/umitanuki.json 19 | 2011/03/01 15:55:18 [49406] cd+++++++ tag/ 20 | 2011/03/01 15:55:18 [49406] >f+++++++ tag/data types.json 21 | 2011/03/01 15:55:18 [49406] >fcst.... tag/france.json 22 | 2011/03/01 15:55:18 [49406] >f.st.... tag/key value pair.json 23 | 2011/03/01 15:55:18 [49406] >f..t.... tag/key value.json 24 | 2011/03/01 15:55:18 [49406] >f.s..... tag/ordered pair.json 25 | 2011/03/01 15:55:18 [49406] >fc...... tag/pair.json 26 | 2011/03/01 15:55:18 [49406] >fcs..... tag/variadic function.json 27 | 2011/03/01 15:55:18 [49406] cd+++++++ dist/ 28 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pair/ 29 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pair/0.1.0/ 30 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.0/META.json 31 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.0/pair-0.1.0.zip 32 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.0/README.txt 33 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pair/0.1.1/ 34 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.1/META.json 35 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.1/pair-0.1.1.zip 36 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pair/0.1.1/README.txt 37 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pg_french_datatypes/ 38 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pg_french_datatypes/0.1.0/ 39 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.0/META.json 40 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.0/pg_french_datatypes-0.1.0.zip 41 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.0/README.txt. 
42 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/pg_french_datatypes/0.1.1/ 43 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.1/META.json 44 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.1/pg_french_datatypes-0.1.1.zip 45 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/pg_french_datatypes/0.1.1/README.txt 46 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/tinyint/ 47 | 2011/03/01 15:55:19 [49406] cd+++++++ dist/tinyint/0.1.0/ 48 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/tinyint/0.1.0/META.json 49 | 2011/03/01 15:55:19 [49406] >f+++++++ dist/tinyint/0.1.0/tinyint-0.1.0.zip 50 | 2011/03/01 15:55:19 [49406] cd+++++++ meta/ 51 | 2011/03/01 15:55:19 [49406] >f+++++++ meta/mirrors.json 52 | 2011/03/01 15:55:19 [49406] >f+++++++ meta/spec.txt 53 | 2011/03/01 15:55:19 [49406] >f+++++++ meta/timestamp 54 | 2011/03/01 15:55:19 [49406] cd+++++++ stats/ 55 | 2011/03/01 15:55:19 [49406] >f+++++++ stats/dist.json 56 | 2011/03/01 15:55:19 [49406] >f+++++++ stats/extension.json 57 | 2011/03/01 15:55:19 [49406] >f+++++++ stats/user.json 58 | 2011/03/01 15:55:19 [49406] >f+++++++ stats/tag.json 59 | 2011/03/01 15:55:19 [49406] >f+++++++ stats/summary.json 60 | 2011/03/01 15:55:19 [49405] sent 1556 bytes received 1868 bytes total size 302398 61 | -------------------------------------------------------------------------------- /t/data/theory-updated.json: -------------------------------------------------------------------------------- 1 | { 2 | "nickname": "theory", 3 | "name": "David E. Wheeler", 4 | "email": "david@justatheory.com", 5 | "uri": "http://justatheory.com/", 6 | "releases": { 7 | "explanation": { 8 | "stable": [ 9 | {"version": "0.2.0", "date": "2011-02-21T20:14:56Z"} 10 | ] 11 | }, 12 | "pair": { 13 | "stable": [ 14 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 15 | ], 16 | "testing": [ 17 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 18 | ] 19 | }, 20 | "pgTAP": { 21 | "stable": [ 22 | {"version": "0.25.0", "date": "2011-02-02T03:25:17Z"} 23 | ] 24 | }, 25 | "semver": { 26 | "stable": [ 27 | {"version": "0.2.0", "date": "2011-02-05T19:31:38Z"} 28 | ] 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /t/data/theory-updated2.json: -------------------------------------------------------------------------------- 1 | { 2 | "nickname": "theory", 3 | "name": "David E. 
Wheeler", 4 | "email": "david@justatheory.com", 5 | "uri": "http://justatheory.com/", 6 | "releases": { 7 | "explanation": { 8 | "stable": [ 9 | {"version": "0.2.0", "date": "2011-02-21T20:14:56Z"} 10 | ] 11 | }, 12 | "pair": { 13 | "stable": [ 14 | {"version": "0.1.2", "date": "2010-11-03T06:23:28Z"}, 15 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 16 | ], 17 | "testing": [ 18 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"} 19 | ] 20 | }, 21 | "pgTAP": { 22 | "stable": [ 23 | {"version": "0.25.0", "date": "2011-02-02T03:25:17Z"} 24 | ] 25 | }, 26 | "semver": { 27 | "stable": [ 28 | {"version": "0.2.0", "date": "2011-02-05T19:31:38Z"} 29 | ] 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /t/docs.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use warnings; 5 | use Test::More 0.88; 6 | use PGXN::API::Indexer; 7 | use Test::File::Contents; 8 | use File::Basename; 9 | use File::Spec::Functions qw(catfile catdir tmpdir); 10 | use Encode qw(encode_utf8); 11 | use utf8; 12 | 13 | my $indexer = new_ok 'PGXN::API::Indexer'; 14 | my $libxml = XML::LibXML->new( 15 | recover => 2, 16 | no_network => 1, 17 | no_blanks => 1, 18 | no_cdata => 1, 19 | ); 20 | 21 | # Unfortunately, we have to write to a file, because file_contents_eq_or_diff 22 | # doesn't seem to work on Windows. 23 | my $tmpfile = catfile tmpdir, 'pgxnapi-doctest$$.html'; 24 | 25 | END { unlink $tmpfile } 26 | 27 | for my $in (glob catfile qw(t htmlin *)) { 28 | my $doc = $libxml->parse_html_file($in, { 29 | suppress_warnings => 1, 30 | suppress_errors => 1, 31 | recover => 2, 32 | }); 33 | 34 | my $html = PGXN::API::Indexer::_clean_html_body($doc->findnodes('/html/body')); 35 | open my $fh, '>:raw', $tmpfile or die "Cannot open $tmpfile: $!\n"; 36 | print $fh encode_utf8 $html, "\n"; 37 | close $fh; 38 | # last if $in =~ /shift/; next; 39 | # diag $html if $in =~ /head/; next; 40 | files_eq_or_diff( 41 | $tmpfile, 42 | catfile(qw(t htmlout), basename $in), 43 | "Test HTML from $in", 44 | { encoding => 'UTF-8' } 45 | ); 46 | } 47 | 48 | done_testing; 49 | 50 | -------------------------------------------------------------------------------- /t/htmlin/basic.html: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | 7 | 8 | Whatever 9 | 10 | 11 | 12 | 13 | 14 | 22 |
23 |
24 |

Body

25 |
26 |
27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /t/htmlin/bulkload.html: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 8 | 9 | 10 | pg_bulkload: Project Home Page 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 22 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /t/htmlin/headers.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Whatever 4 | 5 | 6 |

pgTAP 0.25.0

7 | 8 |

pgTAP is a unit testing framework for PostgreSQL written in PL/pgSQL and 9 | PL/SQL. It includes a comprehensive collection of TAP-emitting assertion 10 | functions, as well as the ability to integrate with other TAP-emitting test 11 | frameworks. It can also be used in the xUnit testing style.

12 | 13 |

Testing pgTAP with pgTAP

14 |

ok( boolean, description )

15 | 16 |

Testing pgTAP with pgTAP

17 |

ok( boolean, description )

18 |

hi there

19 |

The End

20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /t/htmlin/omnipitr.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 |

new()

8 | 9 |

Object contstructor.

10 | 11 |

show_help_and_die()

12 | 13 |

Just as the name suggests - calling this method will print help for program, and exit it with error-code (1).

14 | 15 |

Empty lines, and comment lines (starting with #) are ignored.

16 | 17 |

POD ERRORS

18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /t/htmlin/shiftjis.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgxn/pgxn-api/57b4cc21fffcd9faf9c8f0663b424993b389bfa6/t/htmlin/shiftjis.html -------------------------------------------------------------------------------- /t/htmlin/unwanted.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Whatever 4 | 5 | 6 |

A collection of unwanted stuff

7 | 8 |

Let us center this blink, yo.

9 | 10 |

Oh, no font, either

11 | 12 |

There are some unwanted tags, too, like:

13 | 14 | 15 |
16 |

ha ha we have a form!

17 | 18 | 19 |
20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /t/htmlin/utf8.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Whatéver 5 | 6 | 7 |

This döcument has some UTF-8 in it.

8 | 9 |

粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃

10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /t/htmlout/basic.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 | 7 |
8 |
9 |
10 |
11 |
12 |

Title

13 |

Blah blah blah

14 |
15 |
16 |
17 |
18 |
19 |

Body

20 |
21 |
22 |
23 |
24 | -------------------------------------------------------------------------------- /t/htmlout/bulkload.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 | 9 |
10 |
11 | 12 |

Copyright (c) 2007-2015, NIPPON TELEGRAPH AND TELEPHONE CORPORATION

13 | 14 |
15 |
16 | -------------------------------------------------------------------------------- /t/htmlout/headers.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 | 21 |
22 |
23 |

pgTAP 0.25.0

24 | 25 |

pgTAP is a unit testing framework for PostgreSQL written in PL/pgSQL and 26 | PL/SQL. It includes a comprehensive collection of TAP-emitting assertion 27 | functions, as well as the ability to integrate with other TAP-emitting test 28 | frameworks. It can also be used in the xUnit testing style.

29 | 30 |

Testing pgTAP with pgTAP

31 |

ok( boolean, description )

32 | 33 |

Testing pgTAP with pgTAP

34 |

ok( boolean, description )

35 |

hi there

36 |

The End

37 | 38 | 39 |
40 |
41 | -------------------------------------------------------------------------------- /t/htmlout/omnipitr.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 | 15 |
16 |
17 | 18 |

new()

19 | 20 |

Object contstructor.

21 | 22 |

show_help_and_die()

23 | 24 |

Just as the name suggests - calling this method will print help for program, and exit it with error-code (1).

25 | 26 |

Empty lines, and comment lines (starting with #) are ignored.

27 | 28 |

POD ERRORS

29 | 30 |
31 |
32 | -------------------------------------------------------------------------------- /t/htmlout/shiftjis.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 |
    5 |
6 |
7 |
8 |

This document has some SHIFT_JS in it.

9 | 10 |

今回の地震の影響で一部製品の使い方相談窓口業務を停止しております。

11 |
12 |
13 | -------------------------------------------------------------------------------- /t/htmlout/unwanted.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 | 7 |
8 |
9 |

A collection of unwanted stuff

10 | 11 |

Let us center this blink, yo.

12 | 13 |

Oh, no font, either

14 | 15 |

There are some unwanted tags, too, like:

16 | 17 | 18 | 19 | 20 |
21 |
22 | -------------------------------------------------------------------------------- /t/htmlout/utf8.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Contents

4 |
    5 |
6 |
7 |
8 |

This döcument has some UTF-8 in it.

9 | 10 |

粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃

11 | 12 |
13 |
14 | -------------------------------------------------------------------------------- /t/indexer.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use warnings; 5 | use Test::More tests => 268; 6 | # use Test::More 'no_plan'; 7 | use File::Copy::Recursive qw(dircopy fcopy); 8 | use File::Path qw(remove_tree); 9 | use File::Spec::Functions qw(catfile catdir rel2abs); 10 | use PGXN::API::Sync; 11 | use PGXN::API::Searcher; 12 | use Test::File; 13 | use Test::Exception; 14 | use Test::File::Contents; 15 | use Test::MockModule; 16 | use Archive::Zip; 17 | use utf8; 18 | 19 | my $CLASS; 20 | BEGIN { 21 | if ($] < 5.022) { 22 | # Set the locale to C so testing of error messages works correctly. 23 | require POSIX; 24 | POSIX::setlocale(&POSIX::LC_ALL, 'C'); 25 | } 26 | $File::Copy::Recursive::KeepMode = 0; 27 | $CLASS = 'PGXN::API::Indexer'; 28 | use_ok $CLASS or die; 29 | } 30 | 31 | can_ok $CLASS => qw( 32 | new 33 | verbose 34 | to_index 35 | libxml 36 | index_dir 37 | schemas 38 | indexer_for 39 | update_root_json 40 | copy_from_mirror 41 | parse_from_mirror 42 | add_distribution 43 | copy_files 44 | merge_distmeta 45 | update_user 46 | merge_user 47 | update_tags 48 | update_extensions 49 | finalize 50 | update_user_lists 51 | parse_docs 52 | mirror_file_for 53 | doc_root_file_for 54 | _idx_distmeta 55 | _get_user_name 56 | _strip_html 57 | _update_releases 58 | _index_user 59 | _index 60 | _rollback 61 | _commit 62 | _uri_for 63 | _source_files 64 | _readme 65 | _clean_html_body 66 | ); 67 | 68 | # Make sure Text::Markup recognizes the "none" parser for text files. 69 | is +Text::Markup->guess_format("foo.text"), "none", 70 | 'Text::Markup should parse .text files with the "none" parser'; 71 | is +Text::Markup->guess_format("foo.txt"), "none", 72 | 'Text::Markup should parse .txt files with the "none" parser'; 73 | 74 | my $api = PGXN::API->instance; 75 | my $doc_root = catdir 't', 'test_indexer_root'; 76 | $api->doc_root($doc_root); 77 | END { remove_tree $doc_root } 78 | 79 | # "Sync" from a "mirror." 80 | dircopy catdir(qw(t root)), $api->mirror_root; 81 | 82 | # Mock indexing stuff. 83 | my $mock = Test::MockModule->new($CLASS); 84 | $mock->mock(_commit => sub { shift }); 85 | 86 | ############################################################################## 87 | # Test update_root_json(). 88 | my $indexer = new_ok $CLASS; 89 | file_not_exists_ok catfile($doc_root, qw(index.json)), 'index.json should not exist'; 90 | ok $indexer->update_root_json, 'Update from the mirror'; 91 | file_exists_ok catfile($doc_root, qw(index.json)), 'index.json should now exist'; 92 | 93 | # Make sure it has all the templates we need. 94 | my $tmpl = $api->read_json_from(catfile qw(t root index.json)); 95 | $tmpl->{source} = "/src/{dist}/{dist}-{version}/"; 96 | $tmpl->{htmldoc} = "/dist/{dist}/{version}/{+docpath}.html"; 97 | $tmpl->{search} = '/search/{in}/'; 98 | $tmpl->{userlist} = '/users/{letter}.json'; 99 | is_deeply $api->read_json_from(catfile($doc_root, qw(index.json))), $tmpl, 100 | 'index.json should have additional templates'; 101 | 102 | # Make sure that PGXN::API is aware of them. 103 | is_deeply [sort keys %{ $api->uri_templates } ], 104 | [qw(dist download extension htmldoc meta mirrors readme search source 105 | spec stats tag user userlist)], 106 | 'PGXN::API should see the additional templates'; 107 | 108 | # Do it again, just for good measure. 
109 | ok $indexer->update_root_json, 'Update from the mirror'; 110 | file_exists_ok catfile($doc_root, qw(index.json)), 'index.json should now exist'; 111 | 112 | ############################################################################## 113 | # Test copy_from_mirror(). 114 | my $spec = catfile $api->doc_root, qw(meta spec.txt); 115 | file_not_exists_ok $spec, 'Doc root spec.txt should not exist'; 116 | ok $indexer->copy_from_mirror('meta/spec.txt'), 'Copy spec.txt'; 117 | file_exists_ok $spec, 'Doc root spec.txt should now exist'; 118 | files_eq $spec, catfile($api->mirror_root, qw(meta spec.txt)), 119 | 'And it should be a copy from the mirror'; 120 | 121 | ############################################################################## 122 | # Test parse_from_mirror(). 123 | my $htmlspec = catfile $api->doc_root, qw(meta spec.html); 124 | file_not_exists_ok $htmlspec, 'Doc root spec.html should not exist'; 125 | ok $indexer->parse_from_mirror('meta/spec.txt'), 'Parse spec.txt'; 126 | file_exists_ok $htmlspec, 'Doc root spec.html should now exist'; 127 | file_contents_like $htmlspec, qr{
Name}, 'And it should look like HTML';
 128 | 
 129 | # Try it with a format.
 130 | ok $indexer->parse_from_mirror('meta/spec.txt', 'Multimarkdown'),
 131 |     'Parse spec.txt as MultiMarkdown';
 132 | file_contents_like $htmlspec, qr{

Name

}, 133 | 'And it should look like Multimarkdown-generated HTML'; 134 | 135 | # Try it with an emptyish file. 136 | my $empty = catfile $api->mirror_root, 'empty.md'; 137 | open my $fh, '>', $empty or die "Cannot open $empty: $!\n"; 138 | print $fh "\n \n"; 139 | close $fh; 140 | my $empty_file = catfile $api->doc_root, 'empty.html'; 141 | ok $indexer->parse_from_mirror('empty.md'), 'Parse empty.md'; 142 | file_not_exists_ok $empty_file || unlink $empty_file; 143 | 144 | ############################################################################## 145 | # Let's index pair-0.1.0. 146 | my $meta = $api->read_json_from( 147 | catfile $api->mirror_root, qw(dist pair 0.1.0 META.json) 148 | ); 149 | 150 | file_not_exists_ok( 151 | catfile($api->doc_root, qw(dist pair 0.1.0), 'pair-0.1.0.zip'), 152 | 'pair-0.1.0.zip should not yet exist' 153 | ); 154 | 155 | file_not_exists_ok( 156 | catfile($api->doc_root, qw(dist pair 0.1.0), 'README.txt'), 157 | 'README.txt should not yet exist' 158 | ); 159 | 160 | my $params = { meta => $meta }; 161 | ok $indexer->copy_files($params), 'Copy files'; 162 | 163 | file_exists_ok( 164 | catfile($api->doc_root, qw(dist pair 0.1.0), "pair-0.1.0.zip"), 165 | "pair-0.1.0.zip should now exist" 166 | ); 167 | file_not_exists_ok( 168 | catfile($api->doc_root, qw(dist pair 0.1.0), "README.txt"), 169 | "pair/0.1.0/README.txt still should not exist" 170 | ); 171 | 172 | # Make sure we get an error when we try to copy a file that does't exist. 173 | $meta->{name} = 'nonexistent'; 174 | my $src = catfile $api->mirror_root, qw(dist nonexistent 0.1.0 nonexistent-0.1.0.zip); 175 | my $dst = catfile $api->doc_root, qw(dist nonexistent 0.1.0 nonexistent-0.1.0.zip); 176 | throws_ok { $indexer->copy_files($params ) } 177 | qr{Cannot copy \Q$src\E to \Q$dst\E: No such file or directory}, 178 | 'Should get exception with file names for bad copy'; 179 | $meta->{name} = 'pair'; 180 | 181 | ############################################################################## 182 | # Now merge the distribution metadata files. 183 | my $dist_file = catfile $api->doc_root, qw(dist pair 0.1.0 META.json); 184 | my $dist = catfile $api->doc_root, qw(dist pair.json); 185 | my $docs = { 'docs/pair' => { 186 | title => 'pair', 187 | abstract => 'Key value pair', 188 | }}; 189 | $mock->mock(parse_docs => sub { $docs }); 190 | 191 | # Set up zip archive. 
192 | my $zip = Archive::Zip->new; 193 | $zip->read(rel2abs catfile qw(t root dist pair 0.1.0 pair-0.1.0.zip)); 194 | $params->{zip} = $zip; 195 | 196 | file_not_exists_ok $dist_file, 'pair-0.1.0.json should not yet exist'; 197 | file_not_exists_ok $dist, 'pair.json should not yet exist'; 198 | 199 | ok $indexer->merge_distmeta($params), 'Merge the distmeta'; 200 | 201 | file_exists_ok $dist_file, 'pair-0.1.0.json should now exist'; 202 | file_exists_ok $dist, 'pair.json should now exist'; 203 | 204 | my $readme = $zip->memberNamed('pair-0.1.0/README.md')->contents; 205 | utf8::decode $readme; 206 | $readme =~ s/^\s+//; 207 | $readme =~ s/\s+$//; 208 | $readme =~ s/[\t\n\r]+|\s{2,}/ /gms; 209 | 210 | is_deeply shift @{ $indexer->to_index->{dists} }, { 211 | abstract => 'A key/value pair data type', 212 | date => '2010-10-18T15:24:21Z', 213 | description => "This library contains a single PostgreSQL extension, a key/value pair data type called `pair`, along with a convenience function for constructing key/value pairs.", 214 | dist => 'pair', 215 | key => 'pair', 216 | user => 'Theory', 217 | readme => $readme, 218 | tags => "ordered pair\003Pair", 219 | user_name => 'David E. Wheeler', 220 | version => '0.1.0', 221 | }, 'Should have pair 0.1.0 queued for indexing'; 222 | 223 | # The two files should be identical. 224 | files_eq_or_diff $dist_file, $dist, 225 | 'pair-0.1.0.json and pair.json should be the same'; 226 | 227 | # Our metadata should have new info. 228 | is_deeply $meta->{releases}, 229 | { stable => [{version => '0.1.0', date => '2010-10-19T03:59:54Z'}] }, 230 | 'Meta should now have release info'; 231 | is_deeply $meta->{special_files}, [qw(README.md META.json Makefile)], 232 | 'And it should have special files'; 233 | is_deeply $meta->{docs}, $docs, 'Should have docs from parse_docs'; 234 | is $meta->{provides}{pair}{docpath}, 'docs/pair', 235 | 'Should have drived extension doc path into provides'; 236 | 237 | # So have a look at the contents. 238 | ok my $dist_meta = $api->read_json_from($dist_file), 239 | 'Read the merged distmeta'; 240 | is_deeply $dist_meta, $meta, 'And it should be the merged metadata'; 241 | 242 | # Now update with 0.1.1. "Sync" the updated pair.json. 243 | fcopy catfile(qw(t data pair-updated.json)), 244 | catfile($api->mirror_root, qw(dist pair.json)); 245 | 246 | # Set up the 0.1.1 metadata and zip archive. 247 | my $meta_011 = $api->read_json_from( 248 | catfile $api->mirror_root, qw(dist pair 0.1.1 META.json) 249 | ); 250 | my $zip_011 = Archive::Zip->new; 251 | $zip_011->read(rel2abs catfile qw(t root dist pair 0.1.1 pair-0.1.1.zip)); 252 | $zip_011->addString('# control file', 'pair-0.1.1/pair.control.in'); 253 | 254 | # Set up multiple docs to be returned by parse_docs. 255 | $docs = { 256 | 'docs/howto' => { 257 | extension => 'pair', 258 | title => 'pair Howto', 259 | abstract => 'How to use key value pair', 260 | }, 261 | 'docs/pair' => { 262 | title => 'pair 0.0.1', 263 | abstract => 'Key value pair', 264 | }, 265 | }; 266 | 267 | my $dist_011_file = catfile $api->doc_root, qw(dist pair 0.1.1 META.json); 268 | file_not_exists_ok $dist_011_file, 'pair/0.1.1/META.json should not yet exist'; 269 | $params->{meta} = $meta_011; 270 | $params->{zip} = $zip_011; 271 | do { 272 | # Don't persist the setting of _index_it by merge_distmeta. 
273 | local $indexer->{_index_it} = 1; 274 | ok $indexer->merge_distmeta($params), 'Merge the distmeta'; 275 | }; 276 | file_exists_ok $dist_011_file, 'pair/0.1.1/META.json should now exist'; 277 | 278 | is_deeply $indexer->to_index->{dists}, [], 279 | 'Nothing should not be queued for indexing'; 280 | 281 | files_eq_or_diff $dist_011_file, $dist, 282 | 'pair/0.1.1/META.json and pair.json should be the same'; 283 | 284 | is_deeply $meta_011->{releases}, { stable => [ 285 | {version => '0.1.1', date => '2010-10-29T22:44:42Z'}, 286 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'} 287 | ], testing => [ 288 | {version => '0.1.2', date => '2010-12-13T23:12:41Z'}, 289 | ] }, 'We should have the release data'; 290 | is_deeply $meta_011->{special_files}, 291 | [qw(Changes README.md META.json Makefile pair.control.in)], 292 | 'And it should have special files'; 293 | 294 | delete $docs->{'docs/howto'}{extension}; 295 | is_deeply $meta_011->{docs}, $docs, 'Should have docs without extension key'; 296 | is $meta_011->{provides}{pair}{docpath}, 'docs/howto', 297 | 'Should have explicit extension doc in provides'; 298 | 299 | ok $dist_meta = $api->read_json_from($dist_011_file), 300 | 'Read the 0.1.1 merged distmeta'; 301 | # 0.1.2 has been released but we haven't copied it to the doc root yet. 302 | is_deeply $dist_meta, $meta_011, 303 | 'And it should be the merged with all version info'; 304 | 305 | # Meanwhile, the old file should be the same as before, except that it should 306 | # now also have a list of all releases. 307 | ok $dist_meta = $api->read_json_from($dist_file), 308 | 'Read the older version distmeta'; 309 | $meta->{releases} = { stable => [ 310 | {version => '0.1.1', date => '2010-10-29T22:44:42Z'}, 311 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'} 312 | ], testing => [ 313 | {version => '0.1.2', date => '2010-12-13T23:12:41Z'}, 314 | ] }; 315 | is_deeply $dist_meta, $meta, 'It should be updated with all versions'; 316 | $mock->unmock('parse_docs'); 317 | 318 | ############################################################################## 319 | # Now update the user metadata. 320 | my $user_file = catfile $doc_root, qw(user theory.json); 321 | file_not_exists_ok $user_file, "$user_file should not yet exist"; 322 | $params->{meta} = $meta; 323 | ok $indexer->update_user($params), 'Update the user metadata'; 324 | file_exists_ok $user_file, "$user_file should now exist"; 325 | 326 | is_deeply shift @{ $indexer->to_index->{users} }, { 327 | details => '', 328 | email => 'david@justatheory.com', 329 | key => 'theory', 330 | name => 'David E. Wheeler', 331 | uri => 'http://justatheory.com/', 332 | user => 'theory', 333 | }, 'Should have index data'; 334 | 335 | # Now make sure that it has the updated release metadata. 336 | ok my $mir_data = $api->read_json_from( 337 | catfile $doc_root, qw(mirror user theory.json) 338 | ),'Read the mirror user data file'; 339 | ok my $doc_data = $api->read_json_from($user_file), 340 | 'Read the doc root user data file'; 341 | $mir_data->{releases}{pair}{abstract} = 'A key/value pair data type'; 342 | 343 | is_deeply $doc_data, $mir_data, 344 | 'The doc root data should have the metadata for this release'; 345 | 346 | # Great, now update it. 347 | fcopy catfile(qw(t data theory-updated.json)), 348 | catfile($api->mirror_root, qw(user theory.json)); 349 | $params->{meta} = $meta_011; 350 | { 351 | # Tell it not to index. 
352 | local $indexer->{_index_it} = 0; 353 | ok $indexer->update_user($params), 354 | 'Update the user metadata for pair 0.1.1'; 355 | } 356 | is_deeply $indexer->to_index->{users}, [], 357 | 'Should have no index update for test dist'; 358 | 359 | $mir_data->{releases}{pair}{stable} = [ 360 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'}, 361 | ]; 362 | $mir_data->{releases}{pair}{testing} = [ 363 | {version => '0.1.1', date => '2010-10-29T22:44:42Z'}, 364 | ]; 365 | $mir_data->{releases}{pair}{abstract} = 'A key/value pair dåtå type'; 366 | ok $doc_data = $api->read_json_from($user_file), 367 | 'Read the doc root user data file again'; 368 | is_deeply $doc_data, $mir_data, 369 | 'The doc root data should have the metadata for 0.1.1'; 370 | 371 | # Now do another stable release. 372 | fcopy catfile(qw(t data theory-updated2.json)), 373 | catfile($api->mirror_root, qw(user theory.json)); 374 | my $meta_012 = $api->read_json_from( 375 | catfile $api->mirror_root, qw(dist pair 0.1.2 META.json) 376 | ); 377 | $params->{meta} = $meta_012; 378 | my $zip_012 = Archive::Zip->new; 379 | $zip_012->read(rel2abs catfile qw(t root dist pair 0.1.2 pair-0.1.2.zip)); 380 | $params->{zip} = $zip_012; 381 | ok $indexer->merge_distmeta($params), 'Merge the 0.1.2 distmeta'; 382 | ok $indexer->update_user($params), 383 | 'Update the user metadata for pair 0.1.2'; 384 | unshift @{ $mir_data->{releases}{pair}{stable} }, 385 | {version => '0.1.2', date => '2010-11-03T06:23:28Z'}; 386 | ok $doc_data = $api->read_json_from($user_file), 387 | 'Read the doc root user data file once more'; 388 | is_deeply $doc_data, $mir_data, 389 | 'The doc root data should have the metadata for 0.1.2'; 390 | 391 | my $readme_012 = $zip_012->memberNamed('pair-0.1.2/README.md')->contents; 392 | utf8::decode $readme_012; 393 | $readme_012 =~ s/^\s+//; 394 | $readme_012 =~ s/\s+$//; 395 | $readme_012 =~ s/[\t\n\r]+|\s{2,}/ /gms; 396 | 397 | is_deeply shift @{ $indexer->to_index->{dists} }, { 398 | abstract => 'A key/value pair dåtå type', 399 | date => '2010-11-10T12:18:03Z', 400 | description => 'This library contains a single PostgreSQL extension, a key/value pair data type called `pair`, along with a convenience function for constructing pairs.', 401 | dist => 'pair', 402 | key => 'pair', 403 | user => 'theory', 404 | readme => $readme_012, 405 | tags => "ordered pair\003pair\003key value", 406 | user_name => 'David E. Wheeler', 407 | version => "0.1.2", 408 | }, 'New version should be queued for indexing'; 409 | 410 | is_deeply shift @{ $indexer->to_index->{users} }, { 411 | details => '', 412 | email => 'david@justatheory.com', 413 | key => 'theory', 414 | name => 'David E. Wheeler', 415 | uri => 'http://justatheory.com/', 416 | user => 'theory', 417 | }, 'Should have user index data again'; 418 | 419 | ############################################################################## 420 | # Now update the tag metadata. 
421 | my $pairkw_file = catfile $doc_root, qw(tag pair.json); 422 | my $orderedkw_file = catfile $doc_root, qw(tag), 'ordered pair.json'; 423 | my $keyvalkw_file = catfile $doc_root, qw(tag), 'key value.json'; 424 | file_not_exists_ok $pairkw_file, "$pairkw_file should not yet exist"; 425 | file_not_exists_ok $orderedkw_file, "$orderedkw_file should not yet exist"; 426 | file_not_exists_ok $keyvalkw_file, "$keyvalkw_file should not yet exist"; 427 | $params->{meta} = $meta; 428 | ok $indexer->update_tags($params), 'Update the tags'; 429 | file_exists_ok $pairkw_file, "$pairkw_file should now exist"; 430 | file_exists_ok $orderedkw_file, "$orderedkw_file should now exist"; 431 | file_not_exists_ok $keyvalkw_file, "$keyvalkw_file should still not exist"; 432 | 433 | is @{ $indexer->to_index->{tags} }, 2, 'Should have two tags'; 434 | is_deeply shift @{ $indexer->to_index->{tags} }, { 435 | key => 'ordered pair', 436 | tag => 'ordered pair', 437 | }, 'Should have "ordered pair" index data'; 438 | 439 | is_deeply shift @{ $indexer->to_index->{tags} }, { 440 | key => 'pair', 441 | tag => 'Pair', 442 | }, 'Should have "pair" index data'; 443 | 444 | my $pgtap = { stable => [{ version => "0.25.0", date => '2011-01-22T08:34:51Z'}] }; 445 | my $exp = { 446 | tag => 'pair', 447 | releases => { 448 | pair => { 449 | abstract => "A key/value pair data type", 450 | stable => [ 451 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'}, 452 | ], 453 | }, 454 | pgTAP => $pgtap, 455 | }, 456 | }; 457 | 458 | # Check the contents of the two keywords on the doc root. 459 | ok my $pair_data = $api->read_json_from($pairkw_file), 460 | "Read JSON from $pairkw_file"; 461 | is_deeply $pair_data, $exp, "$pairkw_file should have the release data"; 462 | 463 | $exp->{tag} = 'ordered pair'; 464 | delete $exp->{releases}{pgTAP}; 465 | ok my $ord_data = $api->read_json_from($orderedkw_file), 466 | "Read JSON from $orderedkw_file"; 467 | is_deeply $ord_data, $exp, "$orderedkw_file should have the release data"; 468 | is @{ $indexer->to_index->{tags} }, 0, 'Should have no tags'; 469 | 470 | # Now update with 0.1.1. 471 | $params->{meta} = $meta_011; 472 | fcopy catfile(qw(t data pair-tag-updated.json)), 473 | catfile($api->mirror_root, qw(tag pair.json)); 474 | do { 475 | # Tell it not to index. 476 | local $indexer->{_index_it} = 0; 477 | ok $indexer->update_tags($params), 'Update the tags to 0.1.1'; 478 | }; 479 | file_exists_ok $keyvalkw_file, "$keyvalkw_file should now exist"; 480 | is_deeply $indexer->to_index, { 481 | map { $_ => [] } qw(docs dists extensions users tags) 482 | }, 'Should have no index updates for with _index_it false'; 483 | $indexer->to_index->{tags} = []; 484 | 485 | # Check the JSON data. 
486 | $exp->{tag} = 'pair'; 487 | $exp->{releases}{pair}{stable} = [ 488 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'}, 489 | ]; 490 | $exp->{releases}{pair}{testing} = [ 491 | {version => '0.1.1', date => '2010-10-29T22:44:42Z'}, 492 | ]; 493 | $exp->{releases}{pair}{abstract} = 'A key/value pair dåtå type'; 494 | $exp->{releases}{pgTAP} = $pgtap; 495 | 496 | ok $pair_data = $api->read_json_from($pairkw_file), 497 | "Read JSON from $pairkw_file again"; 498 | is_deeply $pair_data, $exp, "$pairkw_file should be updated for 0.1.1"; 499 | 500 | $exp->{tag} = 'ordered pair'; 501 | delete $exp->{releases}{pgTAP}; 502 | delete $exp->{releases}{pair}{testing}; 503 | ok $ord_data = $api->read_json_from($orderedkw_file), 504 | "Read JSON from $orderedkw_file again"; 505 | is_deeply $ord_data, $exp, "$orderedkw_file should be updated for 0.1.1"; 506 | 507 | $exp->{tag} = 'key value'; 508 | unshift @{ $exp->{releases}{pair}{stable} } => 509 | {"version" => "0.1.1", "date" => "2010-10-29T22:44:42Z"}; 510 | 511 | ok my $keyval_data = $api->read_json_from($keyvalkw_file), 512 | "Read JSON from $keyvalkw_file"; 513 | is_deeply $keyval_data, $exp, "$keyvalkw_file should have 0.1.1 data"; 514 | 515 | # And finally, update to 0.1.2. 516 | $params->{meta} = $meta_012; 517 | fcopy catfile(qw(t data pair-tag-updated2.json)), 518 | catfile($api->mirror_root, qw(tag pair.json)); 519 | fcopy catfile(qw(t data ordered-tag-updated.json)), 520 | catfile($api->mirror_root, qw(tag), 'ordered pair.json'); 521 | fcopy catfile(qw(t data kv-tag-updated.json)), 522 | catfile($api->mirror_root, qw(tag), 'key value.json'); 523 | ok $indexer->update_tags($params), 'Update the tags to 0.1.2'; 524 | 525 | is_deeply shift @{ $indexer->to_index->{tags} }, { 526 | key => 'ordered pair', 527 | tag => 'ordered pair', 528 | }, 'Should have "ordered pair" index data'; 529 | 530 | is_deeply shift @{ $indexer->to_index->{tags} }, { 531 | key => 'pair', 532 | tag => 'pair', 533 | }, 'Should have "pair" index data'; 534 | 535 | is_deeply shift @{ $indexer->to_index->{tags} }, { 536 | key => 'key value', 537 | tag => 'key value', 538 | }, 'Should have "key value" index data'; 539 | 540 | # Make sure all tags are updated. 541 | $exp->{tag} = 'pair'; 542 | $exp->{releases}{pair}{stable} = [ 543 | {version => '0.1.2', date => '2010-11-03T06:23:28Z'}, 544 | {version => '0.1.0', date => '2010-10-19T03:59:54Z'}, 545 | ]; 546 | $exp->{releases}{pair}{testing} = [ 547 | {version => '0.1.1', date => '2010-10-29T22:44:42Z'}, 548 | ]; 549 | $exp->{releases}{pgTAP} = $pgtap; 550 | 551 | ok $pair_data = $api->read_json_from($pairkw_file), 552 | "Read JSON from $pairkw_file once more"; 553 | is_deeply $pair_data, $exp, "$pairkw_file should be updated for 0.1.2"; 554 | 555 | $exp->{tag} = 'ordered pair'; 556 | delete $exp->{releases}{pgTAP}; 557 | ok $ord_data = $api->read_json_from($orderedkw_file), 558 | "Read JSON from $orderedkw_file once more"; 559 | is_deeply $ord_data, $exp, "$orderedkw_file should be updated for 0.1.2"; 560 | 561 | $exp->{tag} = 'key value'; 562 | ok $keyval_data = $api->read_json_from($keyvalkw_file), 563 | "Read JSON from $keyvalkw_file again"; 564 | is_deeply $keyval_data, $exp, "$keyvalkw_file should have 0.1.2 data"; 565 | 566 | ############################################################################## 567 | # Now update the extension metadata. 
568 | $mock->mock(_idx_extdoc => sub { "Doc for $_[2]" }); 569 | my $ext_file = catfile $doc_root, qw(extension pair.json); 570 | file_not_exists_ok $ext_file, "$ext_file should not yet exist"; 571 | $params->{meta} = $meta; 572 | ok $indexer->update_extensions($params), 'Update the extension metadata'; 573 | file_exists_ok $ext_file, "$ext_file should now exist"; 574 | 575 | is_deeply shift @{ $indexer->to_index->{extensions} }, { 576 | abstract => 'A key/value pair data type', 577 | date => '2010-10-18T15:24:21Z', 578 | dist => 'pair', 579 | docpath => 'docs/pair', 580 | extension => 'pair', 581 | key => 'pair', 582 | user => 'Theory', 583 | user_name => 'David E. Wheeler', 584 | version => '0.1.0', 585 | }, 'Should have extension index data'; 586 | 587 | # Now make sure that it has the updated release metadata. 588 | $exp = { 589 | extension => 'pair', 590 | latest => 'stable', 591 | stable => { 592 | abstract => 'A key/value pair data type', 593 | docpath => 'docs/pair', 594 | dist => 'pair', 595 | version => '0.1.0', 596 | sha1 => '1234567890abcdef1234567890abcdef12345678', 597 | }, 598 | versions => { 599 | '0.1.0' => [ 600 | { 601 | dist => 'pair', 602 | date => '2010-10-18T15:24:21Z', 603 | version => '0.1.0', 604 | }, 605 | ], 606 | }, 607 | }; 608 | ok $doc_data = $api->read_json_from($ext_file), 609 | 'Read the doc root extension data file'; 610 | is_deeply $doc_data, $exp, 611 | 'The extension metadata should include the abstract and release date'; 612 | 613 | # Okay, update it with the testing release. 614 | fcopy catfile(qw(t data pair-ext-updated.json)), 615 | catfile($api->mirror_root, qw(extension pair.json)); 616 | $params->{meta} = $meta_011; 617 | ok $indexer->update_extensions($params), 618 | 'Update the extension metadata to 0.1.1'; 619 | 620 | # It should have indexed the testing release because there is no existing stable 621 | # release. Normally this would not happen when there is an existing stable version, 622 | # because merge_distmeta, called by add_distribution, would have set _index_it 623 | # to false. This test ensures that it indexes the highest-status release. 624 | is_deeply shift @{ $indexer->to_index->{extensions} }, { 625 | abstract => 'A key/value pair data type', 626 | date => '2010-10-29T22:46:45Z', 627 | dist => 'pair', 628 | docpath => 'docs/howto', 629 | extension => 'pair', 630 | key => 'pair', 631 | user => 'theory', 632 | user_name => 'David E. Wheeler', 633 | version => '0.1.0', 634 | }, 'Should have stable extension index data'; 635 | 636 | $exp->{latest} = 'testing'; 637 | $exp->{testing} = { 638 | abstract => 'A key/value pair dåtå type', 639 | docpath => 'docs/howto', 640 | dist => 'pair', 641 | version => '0.1.1', 642 | sha1 => 'c552c961400253e852250c5d2f3def183c81adb3', 643 | }; 644 | $exp->{versions}{'0.1.1'} = [{ 645 | dist => 'pair', 646 | date => '2010-10-29T22:46:45Z', 647 | version => '0.1.1', 648 | }]; 649 | 650 | ok $doc_data = $api->read_json_from($ext_file), 651 | 'Read the doc root extension data file again'; 652 | is_deeply $doc_data, $exp, 653 | 'The extension metadata should include the testing data'; 654 | 655 | # Add this version to a different distribution. 
656 | $meta_011->{name} = 'otherdist'; 657 | $meta_011->{version} = '0.3.0'; 658 | $meta_011->{release_status} = 'stable'; 659 | 660 | fcopy catfile(qw(t data pair-ext-updated2.json)), 661 | catfile($api->mirror_root, qw(extension pair.json)); 662 | ok $indexer->update_extensions($params), 663 | 'Add the extension to another distribution'; 664 | 665 | is_deeply shift @{ $indexer->to_index->{extensions} }, { 666 | abstract => 'A key/value pair dåtå type', 667 | date => '2010-10-29T22:46:45Z', 668 | dist => 'otherdist', 669 | docpath => 'docs/howto', 670 | extension => 'pair', 671 | key => 'pair', 672 | user => 'theory', 673 | user_name => 'David E. Wheeler', 674 | version => '0.1.2', 675 | }, 'Should have otherdist extension index data'; 676 | 677 | ok $doc_data = $api->read_json_from($ext_file), 678 | 'Read the doc root extension data file once again'; 679 | unshift @{ $exp->{versions}{'0.1.1'} } => { 680 | dist => 'otherdist', 681 | date => '2010-10-29T22:46:45Z', 682 | version => '0.3.0' 683 | }; 684 | $exp->{stable} = { 685 | abstract => 'A key/value pair dåtå type', 686 | docpath => 'docs/howto', 687 | dist => 'pair', 688 | sha1 => 'cebefd23151b4b797239646f7ae045b03d028fcf', 689 | version => '0.1.2', 690 | }; 691 | is_deeply $doc_data, $exp, 692 | "The second distribution's metadata should now be present"; 693 | 694 | # Great! Now update it to 0.1.2. 695 | fcopy catfile(qw(t data pair-ext-updated3.json)), 696 | catfile($api->mirror_root, qw(extension pair.json)); 697 | $params->{meta} = $meta_012; 698 | ok $indexer->update_extensions($params), 699 | 'Update the extension to 0.1.2.'; 700 | 701 | is_deeply shift @{ $indexer->to_index->{extensions} }, { 702 | abstract => 'A key/value pair dåtå type', 703 | date => '2010-11-10T12:18:03Z', 704 | dist => 'pair', 705 | docpath => '', 706 | extension => 'pair', 707 | key => 'pair', 708 | user => 'theory', 709 | user_name => 'David E. Wheeler', 710 | version => '0.1.2', 711 | }, 'Should have extension index data again'; 712 | 713 | delete $exp->{stable}{docpath}; 714 | $exp->{latest} = 'stable'; 715 | $exp->{stable}{version} = '0.1.2'; 716 | $exp->{stable}{abstract} = 'A key/value pair dåtå type'; 717 | $exp->{stable}{sha1} = 'cebefd23151b4b797239646f7ae045b03d028fcf'; 718 | $exp->{versions}{'0.1.2'} = [{ 719 | dist => 'pair', 720 | date => '2010-11-10T12:18:03Z', 721 | version => '0.1.2', 722 | }]; 723 | ok $doc_data = $api->read_json_from($ext_file), 724 | 'Read the doc root extension data file one more time'; 725 | is_deeply $doc_data, $exp, 'Should now have the 0.1.2 metadata'; 726 | 727 | # Now a reindex where a version disappears. 728 | fcopy catfile(qw(t data pair-ext-updated4.json)), 729 | catfile($api->mirror_root, qw(extension pair.json)); 730 | $meta_012->{provides}{pair}{version} = '0.1.3'; 731 | $params->{meta} = $meta_012; 732 | ok $indexer->update_extensions($params), 'Replace 0.1.2 with 0.1.3'; 733 | ok $doc_data = $api->read_json_from($ext_file), 734 | 'Read the doc root extension data file one more time'; 735 | $exp->{stable}{version} = '0.1.3'; 736 | $exp->{versions}{'0.1.3'} = delete $exp->{versions}{'0.1.2'}; 737 | is_deeply $doc_data, $exp, 'Should now have the 0.1.3 metadata'; 738 | 739 | is_deeply shift @{ $indexer->to_index->{extensions} }, { 740 | abstract => 'A key/value pair dåtå type', 741 | date => '2010-11-10T12:18:03Z', 742 | dist => 'pair', 743 | docpath => '', 744 | extension => 'pair', 745 | key => 'pair', 746 | user => 'theory', 747 | user_name => 'David E. 
Wheeler', 748 | version => '0.1.3', 749 | }, 'Should have extension index data again'; 750 | 751 | ############################################################################## 752 | # Test parse_docs(). 753 | my $sync = PGXN::API::Sync->new( 754 | source => 'rsync://localhost/pgxn', 755 | rsync_path => catfile qw(t bin), 'testrsync' . (PGXN::API::Sync::WIN32 ? '.bat' : '') 756 | ); 757 | my $pgz = catfile qw(dist pair 0.1.0 pair-0.1.0.zip); 758 | 759 | $params->{meta} = $meta; 760 | ok $params->{zip} = $sync->unzip($pgz, {name => 'pair'}), "Unzip $pgz"; 761 | 762 | my $doc_dir = catdir $doc_root, qw(dist pair 0.1.0); 763 | $readme = catfile $doc_dir, 'README.html'; 764 | my $doc = catfile $doc_dir, 'doc', 'pair.html'; 765 | file_exists_ok $doc_dir, 'Directory dist/pair/0.1.0 should exist'; 766 | file_not_exists_ok $readme, 'dist/pair/0.1.0/README.html should not exist'; 767 | file_not_exists_ok $doc, 'dist/pair/0.1.0/doc/pair.html should not exist'; 768 | 769 | is_deeply $indexer->to_index, { 770 | map { $_ => [] } qw(docs dists extensions users tags) 771 | }, 'Should start with no docs to index'; 772 | 773 | $meta->{docs} = $indexer->parse_docs($params); 774 | is_deeply $meta->{docs}, { 775 | 'README' => { title => 'pair 0.1.0' }, 776 | 'doc/pair' => { title => 'pair 0.1.0', abstract => 'A key/value pair data type' }, 777 | }, 'Should get array of docs from parsing'; 778 | ok !exists $meta->{provides}{README}, 779 | 'Should not have autovivified README into provides'; 780 | 781 | ok my $body = delete $indexer->to_index->{docs}[0]{body}, 782 | 'Should have document body'; 783 | 784 | like $body, qr/^pair 0[.]1[.]0\b/ms, 'Should look like plain text'; 785 | unlike $body, qr/<[^>]+>/, 'Should have nothing that looks like HTML'; 786 | unlike $body, qr/&[^;];/, 'Should have nothing that looks like an entity'; 787 | unlike $body, qr/ Contents/ms, 'Should not have contents'; 788 | 789 | is_deeply shift @{ $indexer->to_index->{docs} }, { 790 | abstract => 'A key/value pair data type', 791 | date => '2010-10-18T15:24:21Z', 792 | dist => 'pair', 793 | key => 'pair/doc/pair', 794 | user => 'Theory', 795 | docpath => 'doc/pair', 796 | title => 'pair 0.1.0', 797 | user_name => 'David E. Wheeler', 798 | version => '0.1.0', 799 | }, 'Should have document data for indexing'; 800 | 801 | is_deeply $indexer->to_index->{docs}, [], 'Should be no other documents to index'; 802 | 803 | file_exists_ok $doc_dir, 'Directory dist/pair/0.1.0 should now exist'; 804 | file_exists_ok $readme, 'dist/pair/0.1.0/README.html should now exist'; 805 | file_contents_like $readme, qr/
make/, 'Fenced code should be a 
 block';
 806 | file_exists_ok $doc, 'dist/pair/pair-0.1.0/doc/pair.html should now exist';
 807 | file_contents_like $readme, qr{\Q

pair 0.1.0

}, 808 | 'README.html should have HTML'; 809 | 810 | file_contents_unlike $readme, qr{pair 0.1.0}, 813 | 'Doc should have preformatted HTML'; 814 | file_contents_unlike $doc, qr{to_index->{docs} = []; 819 | $meta->{no_index} = { file => ['doc/pair.md'] }; 820 | $docs = $indexer->parse_docs($params); 821 | is_deeply $docs, { 822 | 'README' => { title => 'pair 0.1.0' }, 823 | }, 'Doc parser should ignore no_index-specified doc file'; 824 | 825 | $indexer->to_index->{docs} = []; 826 | $meta->{no_index} = { directory => ['doc']}; 827 | $docs = $indexer->parse_docs($params); 828 | is_deeply $docs, { 829 | 'README' => { title => 'pair 0.1.0' }, 830 | }, 'Doc parser should ignore no_index-specified doc directory'; 831 | 832 | delete $meta->{no_index}; 833 | 834 | # Index the README as docs if it's listed in docfile. 835 | $indexer->to_index->{docs} = []; 836 | delete $meta->{provides}{pair}{docpath}; 837 | $meta->{provides}{pair}{docfile} = 'README.md'; 838 | $docs = $indexer->parse_docs($params); 839 | is_deeply $docs, { 840 | 'README' => { title => 'pair 0.1.0', extension => 'pair' }, 841 | 'doc/pair' => { title => 'pair 0.1.0', abstract => 'A key/value pair data type' }, 842 | }, 'Doc parser should parse the README as the pair extension and also the doc/pair docs'; 843 | is @{ $indexer->to_index->{docs} }, 2, 'Should have two docs to index'; 844 | is $indexer->to_index->{docs}[0]{docpath}, 'README', 845 | 'The first should be the the README listed in docfile'; 846 | is $indexer->to_index->{docs}[1]{docpath}, 'doc/pair', 847 | 'The second should be the the doc/pair file'; 848 | 849 | # Try it with a package containing only a README, but keep the named docfile. 850 | $indexer->to_index->{docs} = []; 851 | delete $meta->{provides}{pair}{docpath}; 852 | ok $params->{zip}->removeMember('pair-0.1.0/doc/pair.md'), 853 | 'Remove doc directory'; 854 | $docs = $indexer->parse_docs($params); 855 | is_deeply $docs, { 856 | 'README' => { title => 'pair 0.1.0', extension => 'pair' }, 857 | }, 'Doc parser should parse the README as the pair extension doc'; 858 | is @{ $indexer->to_index->{docs} }, 1, 'Should have one doc to index'; 859 | is $indexer->to_index->{docs}[0]{docpath}, 'README', 860 | 'And it should be the README'; 861 | 862 | # Now remove the docfile spec. 863 | $indexer->to_index->{docs} = []; 864 | delete $meta->{provides}{pair}{docpath}; 865 | delete $meta->{provides}{pair}{docfile}; 866 | $docs = $indexer->parse_docs($params); 867 | is_deeply $docs, { 868 | 'README' => { title => 'pair 0.1.0' }, 869 | }, 'Doc parser should parse the README with its title as the sole doc'; 870 | is @{ $indexer->to_index->{docs} }, 1, 'Should have one doc to index'; 871 | is $indexer->to_index->{docs}[0]{docpath}, 'README', 872 | 'And it should be the README'; 873 | 874 | # Try it with an emptyish file. 
875 | $indexer->to_index->{docs} = []; 876 | $params->{zip}->addString('', 'pair-0.1.0/foo.pl'); 877 | touch(catfile $indexer->doc_root_file_for(source => $params->{meta}), 'foo.pl'); 878 | my $plhtml = catfile $doc_dir, 'foo.html'; 879 | file_not_exists_ok $plhtml, 'dist/pair/pair-0.1.0/foo.html should not exist'; 880 | $docs = $indexer->parse_docs($params); 881 | file_not_exists_ok $plhtml, 'dist/pair/pair-0.1.0/foo.html still should not exist'; 882 | is_deeply $meta->{docs}, { 883 | 'README' => { title => 'pair 0.1.0' }, 884 | 'doc/pair' => { title => 'pair 0.1.0', abstract => 'A key/value pair data type' }, 885 | }, 'Should array of docs excluding file with no docs'; 886 | 887 | 888 | ############################################################################## 889 | # Make sure that add_document() calls all the necessary methods. 890 | my @called; 891 | my @meths = qw( 892 | copy_files 893 | merge_distmeta 894 | update_user 895 | update_tags 896 | update_extensions 897 | ); 898 | for my $meth (@meths) { 899 | $mock->mock($meth => sub { 900 | push @called => $meth; 901 | is $_[1], $params, "Params should have been passed to $meth"; 902 | }) 903 | } 904 | 905 | $params->{meta} = $meta; 906 | ok $indexer->add_distribution($params), 'Call add_distribution()'; 907 | is_deeply \@called, \@meths, 'The proper meths should have been called in order'; 908 | $mock->unmock_all; 909 | 910 | ############################################################################## 911 | # Make sure transaction stuff works. 912 | ok !$indexer->_rollback, 'Rollback'; 913 | is_deeply $indexer->to_index, { 914 | map { $_ => [] } qw(docs dists extensions users tags) 915 | }, 'Should start with no docs to index'; 916 | $doc = { 917 | key => 'foo', 918 | dist => 'explain', 919 | abstract => 'explanation: 0.1.3, 0.2.4', 920 | }; 921 | 922 | ok $indexer->_index(dists =>$doc), 'Index a doc'; 923 | is_deeply $indexer->to_index->{dists}, [$doc], 'Should have it in docs'; 924 | ok !$indexer->_rollback, 'Rollback should return false'; 925 | is_deeply $indexer->to_index, { 926 | map { $_ => [] } qw(docs dists extensions users tags) 927 | }, 'Should have no docs to index again'; 928 | 929 | # Test full text search indexing. 930 | ok $indexer->_index(dists => $doc), 'Index a doc again'; 931 | file_not_exists_ok catdir($doc_root, '_index'), 'Should not have index dir yet'; 932 | 933 | isa_ok $indexer->indexer_for($_), 'Lucy::Index::Indexer', "$_ indexer" 934 | for qw(docs dists extensions users tags); 935 | ok $indexer->_commit, 'Commit that doc'; 936 | file_exists_ok catdir($doc_root, '_index'), 'Should now have index dir'; 937 | is_deeply $indexer->to_index, { 938 | map { $_ => [] } qw(docs dists extensions users tags) 939 | }, 'Should once again have no docs to index'; 940 | 941 | ############################################################################## 942 | # Test _get_user_name(). 943 | is $indexer->_get_user_name({ user => 'theory'}), 'David E. Wheeler', 944 | '_get_user_name() should work'; 945 | is $indexer->_get_user_name({ user => 'theory'}), 'David E. Wheeler', 946 | '_get_user_name() should return same name for same nick'; 947 | is $indexer->_get_user_name({user => 'fred'}), 'Fred Flintstone', 948 | '_get_user_name() should return diff name for diff nick'; 949 | 950 | ############################################################################## 951 | # Time to actually add some stuff to the index. 
952 | $mock->unmock_all; 953 | 954 | $params = { meta => $meta, zip => $zip }; 955 | ok $indexer->add_distribution($params), 'Index pair 0.1.0'; 956 | 957 | ok my $searcher = PGXN::API::Searcher->new($doc_root), 'Instantiate a searcher'; 958 | 959 | # Let's search it! 960 | ok my $res = $searcher->search(query => 'data', in => 'dists'), 961 | 'Search dists for "data"'; 962 | is $res->{count}, 1, 'Should have one result'; 963 | is $res->{hits}[0]{abstract}, 'A key/value pair data type', 964 | 'It should have the distribution'; 965 | is $res->{hits}[0]{version}, '0.1.0', 'It should be 0.1.0'; 966 | 967 | # Search the docs. 968 | ok $res = $searcher->search(query => 'composite', in => 'docs'), 969 | 'Search docs for "composite"'; 970 | is $res->{count}, 1, 'Should have one result'; 971 | is $res->{hits}[0]{abstract}, 'A key/value pair data type', 972 | 'It should have the abstract'; 973 | like $res->{hits}[0]{excerpt}, qr{\Qtwo-value composite type}, 974 | 'It should have the excerpt'; 975 | is $res->{hits}[0]{dist}, 'pair', 'It should be in dist "pair"'; 976 | is $res->{hits}[0]{version}, '0.1.0', 'It should be in 0.1.0'; 977 | 978 | ############################################################################## 979 | # Now index 0.1.1 (testing). 980 | $meta_011 = $api->read_json_from( 981 | catfile $api->mirror_root, qw(dist pair 0.1.1 META.json) 982 | ); 983 | $pgz = catfile qw(dist pair 0.1.1 pair-0.1.1.zip); 984 | $params->{meta} = $meta_011; 985 | ok $params->{zip} = $sync->unzip($pgz, {name => 'pair'}), "Unzip $pgz"; 986 | ok $indexer->add_distribution($params), 'Index pair 0.1.1'; 987 | is_deeply $indexer->to_index, { 988 | map { $_ => [] } qw(docs dists extensions users tags) 989 | }, 'Should have no index updates for test dist'; 990 | 991 | # The previous stable release should still be indexed. 992 | ok $searcher = PGXN::API::Searcher->new($doc_root), 'Instantiate another searcher'; 993 | ok $res = $searcher->search(query => 'data', in => 'dists'), 994 | 'Search dists for "data" again'; 995 | is $res->{count}, 1, 'Should have one result'; 996 | is $res->{hits}[0]{abstract}, 'A key/value pair data type', 997 | 'It should have the distribution'; 998 | is $res->{hits}[0]{version}, '0.1.0', 'It should still be 0.1.0'; 999 | 1000 | # Search docs. 1001 | ok $res = $searcher->search(query => 'composite'), 1002 | 'Search docs for "composite" again'; 1003 | is $res->{count}, 1, 'Should also have one result'; 1004 | is $res->{hits}[0]{abstract}, 'A key/value pair data type', 1005 | 'It should have the same abstract'; 1006 | like $res->{hits}[0]{excerpt}, qr{\Qtwo-value composite type}, 1007 | 'It should have the same excerpt'; 1008 | is $res->{hits}[0]{dist}, 'pair', 'It should still be in dist "pair"'; 1009 | is $res->{hits}[0]{version}, '0.1.0', 'It should still be in 0.1.0'; 1010 | 1011 | ############################################################################## 1012 | # Make 0.1.1 stable and try again. 1013 | $meta_011->{release_status} = 'stable'; 1014 | ok $indexer->add_distribution($params), 'Index pair 0.1.1 stable'; 1015 | 1016 | # Now it should be updated. 
1017 | ok $searcher = PGXN::API::Searcher->new($doc_root), 'Instantiate the searcher'; 1018 | ok $res = $searcher->search(query => 'dåtå', in => 'dists'), 1019 | 'Search dists for "dåtå"'; 1020 | is $res->{count}, 1, 'Should have one result'; 1021 | is $res->{hits}[0]{abstract}, 'A key/value pair dåtå type', 1022 | 'It should have the distribution'; 1023 | is $res->{hits}[0]{version}, '0.1.1', 'It should still be 0.1.1'; 1024 | 1025 | # The query for "data" should stil return the one record. 1026 | ok $res = $searcher->search(query => 'data', in => 'dists'), 1027 | 'Search dists for "data" one last time'; 1028 | is $res->{count}, 1, 'Should have one result'; 1029 | is $res->{hits}[0]{abstract}, 'A key/value pair dåtå type', 1030 | 'It should have the distribution'; 1031 | is $res->{hits}[0]{version}, '0.1.1', 'It should still be 0.1.1'; 1032 | 1033 | # Search docs. 1034 | ok $res = $searcher->search(query => 'composite'), 1035 | 'Search docs for "composite" once more'; 1036 | is $res->{count}, 1, 'Should again have one result'; 1037 | is $res->{hits}[0]{abstract}, 'A key/value pair dåtå type', 1038 | 'It should have the updated abstract'; 1039 | like $res->{hits}[0]{excerpt}, qr{\Qtwo-value composite type}, 1040 | 'It should have the same excerpt'; 1041 | is $res->{hits}[0]{dist}, 'pair', 'It should still be in dist "pair"'; 1042 | is $res->{hits}[0]{version}, '0.1.1', 'It should now be in 0.1.1'; 1043 | 1044 | ############################################################################## 1045 | # Test update_user_lists(). Add another user name. 1046 | my $unames = $indexer->_user_names; 1047 | $unames->{Tom} = 'Tom Lane'; 1048 | 1049 | my $f = catfile($doc_root, 'users', 'f.json'); 1050 | my $t = catfile($doc_root, 'users', 't.json'); 1051 | file_not_exists_ok $f, 'f.json should not exist'; 1052 | file_not_exists_ok $t, 't.json should not exist'; 1053 | ok $indexer->update_user_lists, 'Update user lists'; 1054 | file_exists_ok $f, 'f.json should now exist'; 1055 | file_exists_ok $t, 't.json should now exist'; 1056 | my $tdata = $api->read_json_from($t); 1057 | is_deeply $tdata, [ 1058 | { user => 'theory', name => 'David E. Wheeler' }, 1059 | { user => 'Tom', name => 'Tom Lane' }, 1060 | ], 't.json should have both users'; 1061 | my $fdata = $api->read_json_from($f); 1062 | is_deeply $fdata, [ 1063 | { user => 'fred', name => 'Fred Flintstone' }, 1064 | ], 'f.json should have fred'; 1065 | 1066 | # Update tom and David and add a new T name. 1067 | delete $unames->{Tom}; 1068 | $unames->{tom} = 'Tom G. Lane'; 1069 | $unames->{tony} = 'Tony Vanilla'; 1070 | 1071 | ok $indexer->update_user_lists, 'Update user lists again'; 1072 | $tdata = $api->read_json_from($t); 1073 | is_deeply $tdata, [ 1074 | { user => 'theory', name => 'David E. Wheeler' }, 1075 | { user => 'tom', name => 'Tom G. Lane' }, 1076 | { user => 'tony', name => 'Tony Vanilla' }, 1077 | ], 't.json should have the updated user info'; 1078 | 1079 | ############################################################################## 1080 | # Now test user merging. 1081 | my $fred = catfile $doc_root, qw(user fred.json); 1082 | file_not_exists_ok $fred, 'fred.json should not exist'; 1083 | 1084 | # Should ignore user in user_names. 1085 | ok $indexer->merge_user('fred'), 'Merge fred.json'; 1086 | file_not_exists_ok $fred, 'fred.json still should not exist'; 1087 | 1088 | # Remove and update again. 
1089 | delete $indexer->_user_names->{fred}; 1090 | ok $indexer->merge_user('fred'), 'Merge fred.json'; 1091 | file_exists_ok $fred, 'fred.json should now exist'; 1092 | is $indexer->_user_names->{fred}, 'Fred Flintstone', 1093 | 'And Fred should be back in the user name lookup'; 1094 | is_deeply $api->read_json_from($fred), { 1095 | email => 'fred@flintstone.com', 1096 | name => 'Fred Flintstone', 1097 | nickname => 'fred', 1098 | releases => {}, 1099 | twitter => 'fred', 1100 | uri => "http://fred.flintstone.com/" 1101 | }, 'And it should have all the necessary data'; 1102 | 1103 | is_deeply shift @{ $indexer->to_index->{users} }, { 1104 | details => 'fred', 1105 | name => 'Fred Flintstone', 1106 | uri => 'http://fred.flintstone.com/', 1107 | email => 'fred@flintstone.com', 1108 | key => 'fred', 1109 | user => 'fred', 1110 | }, 'Should have index data for Fred'; 1111 | 1112 | # Update theory on the mirror. 1113 | my $theory = catfile $api->mirror_root, qw(user theory.json); 1114 | my $data = $api->read_json_from($theory); 1115 | $data->{name} = 'David Wheeler'; 1116 | $data->{uri} = 'http://www.justatheory.com/'; 1117 | $api->write_json_to($theory, $data); 1118 | 1119 | # Now merge theory. 1120 | delete $indexer->_user_names->{theory}; 1121 | ok $indexer->merge_user('theory'), 'Merge theory.json'; 1122 | is $indexer->_user_names->{theory}, 'David Wheeler', 1123 | 'And Theory should be back in the user name lookup'; 1124 | is_deeply $api->read_json_from(catfile $doc_root, qw(user theory.json)), { 1125 | email => 'david@justatheory.com', 1126 | name => 'David Wheeler', 1127 | nickname => 'theory', 1128 | releases => { 1129 | pair => { 1130 | abstract => "A key/value pair d\xE5t\xE5 type", 1131 | stable => [ 1132 | { date => "2010-11-03T06:23:28Z", version => "0.1.2" }, 1133 | { date => "2010-10-19T03:59:54Z", version => "0.1.0" }, 1134 | ], 1135 | testing => [{ date => "2010-10-29T22:44:42Z", version => "0.1.1" }], 1136 | }, 1137 | pgTAP => { 1138 | stable => [{ date => "2011-02-02T03:25:17Z", version => "0.25.0" }], 1139 | }, 1140 | semver => { 1141 | stable => [{ date => "2011-02-05T19:31:38Z", version => "0.2.0" }], 1142 | }, 1143 | }, 1144 | uri => "http://www.justatheory.com/" 1145 | }, 'And it should have the updated data'; 1146 | 1147 | is_deeply shift @{ $indexer->to_index->{users} }, { 1148 | details => '', 1149 | email => 'david@justatheory.com', 1150 | key => 'theory', 1151 | name => 'David Wheeler', 1152 | uri => 'http://www.justatheory.com/', 1153 | user => 'theory', 1154 | }, 'Should have updated index data for Theory'; 1155 | 1156 | ############################################################################## 1157 | # Test finalize(). 1158 | @called = (); 1159 | $mock->mock(update_user_lists => sub { push @called, 'update_user_lists' }); 1160 | $mock->mock(_commit => sub { push @called, '_commit' }); 1161 | ok $indexer->finalize, 'Call finalize()'; 1162 | is_deeply \@called, [qw(update_user_lists _commit)], 1163 | 'update_user_lists() and commit() should have been called'; 1164 | 1165 | ############################################################################## 1166 | # Test find_docs(). 
1167 | touch(catfile $indexer->doc_root_file_for(source => $params->{meta}), qw(sql hi.mkdn)); 1168 | $params->{meta}{provides}{pair}{docfile} = 'sql/hi.mkdn'; 1169 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1170 | { filename => 'README.md' }, 1171 | { filename => 'doc/pair.md' }, 1172 | { filename => 'sql/hi.mkdn', extension => 'pair' }, 1173 | ], 'find_docs() should find specified and random doc files'; 1174 | 1175 | $params->{meta}{no_index} = { file => ['sql/hi.mkdn'] }; 1176 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1177 | { filename => 'README.md' }, 1178 | { filename => 'doc/pair.md' }, 1179 | { filename => 'sql/hi.mkdn', extension => 'pair' }, 1180 | ], 'find_docs() no_index should be ignored for specified doc file'; 1181 | 1182 | $params->{meta}{no_index} = { file => ['doc/pair.md'] }; 1183 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1184 | { filename => 'README.md' }, 1185 | { filename => 'sql/hi.mkdn', extension => 'pair' }, 1186 | ], 'find_docs() should respect no_index for found docs'; 1187 | 1188 | $params->{meta}{no_index} = { directory => ['sql'] }; 1189 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1190 | { filename => 'README.md' }, 1191 | { filename => 'doc/pair.md' }, 1192 | { filename => 'sql/hi.mkdn', extension => 'pair' }, 1193 | ], 'find_docs() should ignore no_index directory for specified doc'; 1194 | 1195 | $params->{meta}{no_index} = { directory => ['doc'] }; 1196 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1197 | { filename => 'README.md' }, 1198 | { filename => 'sql/hi.mkdn', extension => 'pair' }, 1199 | ], 'find_docs() should respect no_index directory for found docs'; 1200 | 1201 | delete $params->{meta}{no_index}; 1202 | $params->{meta}{provides}{pair}{docfile} = 'foo/bar.txt'; 1203 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1204 | { filename => 'README.md' }, 1205 | { filename => 'doc/pair.md' }, 1206 | ], 'find_docs() should ignore non-existent specified file'; 1207 | 1208 | $params->{meta}{provides}{pair}{docfile} = 'doc/pair.md'; 1209 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1210 | { filename => 'README.md' }, 1211 | { filename => 'doc/pair.md', extension => 'pair' }, 1212 | ], 'find_docs() should not return dupes'; 1213 | 1214 | $params->{meta}{provides}{pair}{docfile} = 'doc/pair.pdf'; 1215 | touch(catfile $indexer->doc_root_file_for(source => $params->{meta}), qw(doc pair.pdf)); 1216 | 1217 | is_deeply [ sort { $a->{filename} cmp $b->{filename} } $indexer->find_docs($params)], [ 1218 | { filename => 'README.md' }, 1219 | { filename => 'doc/pair.md' }, 1220 | ], 'find_docs() should ignore doc files it does not know how to parse'; 1221 | 1222 | sub touch { 1223 | my $fn = shift; 1224 | open my $fh, '>', $fn or die "Cannot open $fn: $!\n"; 1225 | close $fh; 1226 | } 1227 | -------------------------------------------------------------------------------- /t/pod-coverage.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use warnings; 5 | use Test::More; 6 | 7 | eval "use Test::Pod::Coverage 1.06"; 8 | plan skip_all => 'Test::Pod::Coverage 1.06 required' if $@; 9 | all_pod_coverage_ok({ also_private => [ qr/^[A-Z_]+(?:32)?$/ ] }); 10 | 
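For orientation, here is a minimal sketch of the index-then-search flow that t/indexer.t above exercises, using only calls that appear in that test (PGXN::API->instance, read_json_from(), add_distribution(), finalize(), and PGXN::API::Searcher->search()). The file paths and the PGXN::API::Indexer->new constructor call are illustrative assumptions, not taken verbatim from the test.

```perl
#!/usr/bin/env perl
use strict;
use warnings;
use Archive::Zip;
use PGXN::API;
use PGXN::API::Indexer;    # ->new below is an assumption for illustration
use PGXN::API::Searcher;

# Point the API singleton at a document root and a synced mirror root
# (hypothetical paths).
my $api = PGXN::API->instance;
$api->doc_root('www');
$api->mirror_root('mirror');

# Read a release's metadata from the mirror and open its zip archive.
my $meta = $api->read_json_from('mirror/dist/pair/0.1.0/META.json');
my $zip  = Archive::Zip->new;
$zip->read('mirror/dist/pair/0.1.0/pair-0.1.0.zip');

# Index the distribution, then write user lists and commit the full-text index.
my $indexer = PGXN::API::Indexer->new;
$indexer->add_distribution({ meta => $meta, zip => $zip });
$indexer->finalize;

# Query the resulting full-text index under the document root.
my $searcher = PGXN::API::Searcher->new($api->doc_root);
my $res = $searcher->search(query => 'pair', in => 'dists');
printf "%d hit(s)\n", $res->{count};
```
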
-------------------------------------------------------------------------------- /t/pod-spelling.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use Test::More; 5 | eval "use Test::Spelling"; 6 | plan skip_all => "Test::Spelling required for testing POD spelling" if $@; 7 | 8 | add_stopwords(); 9 | all_pod_files_spelling_ok(); 10 | 11 | __DATA__ 12 | API 13 | API's 14 | APIs 15 | browsable 16 | CPAN 17 | CPAN 18 | crontab 19 | GitHub 20 | JSON 21 | merchantability 22 | metadata 23 | middleware 24 | pgTAP 25 | PGXN 26 | Plack 27 | PostgreSQL 28 | PSGI 29 | RDBMS 30 | SHA 31 | subdirectory 32 | superset 33 | TCP 34 | -------------------------------------------------------------------------------- /t/pod.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use Test::More; 5 | eval "use Test::Pod 1.41"; 6 | plan skip_all => "Test::Pod 1.41 required for testing POD" if $@; 7 | all_pod_files_ok(); 8 | -------------------------------------------------------------------------------- /t/root/dist/pair.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pair", 3 | "releases": { 4 | "stable": [ 5 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 6 | ] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.0/META.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pair", 3 | "abstract": "A key/value pair data type", 4 | "description": "This library contains a single PostgreSQL extension, a key/value pair data type called `pair`, along with a convenience function for constructing key/value pairs.", 5 | "version": "0.1.0", 6 | "maintainer": [ 7 | "David E. Wheeler " 8 | ], 9 | "date": "2010-10-18T15:24:21Z", 10 | "release_status": "stable", 11 | "user": "Theory", 12 | "sha1": "1234567890abcdef1234567890abcdef12345678", 13 | "license": "postgresql", 14 | "provides": { 15 | "pair": { 16 | "abstract": "A key/value pair data type", 17 | "file": "sql/pair.sql", 18 | "version": "0.1.0" 19 | } 20 | }, 21 | "tags": ["ordered pair", "Pair"], 22 | "resources": { 23 | "bugtracker": { 24 | "web": "http://github.com/theory/kv-pair/issues/" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "git://github.com/theory/kv-pair.git", 29 | "web": "http://github.com/theory/kv-pair/" 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.0/pair-0.1.0.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgxn/pgxn-api/57b4cc21fffcd9faf9c8f0663b424993b389bfa6/t/root/dist/pair/0.1.0/pair-0.1.0.zip -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.1/META.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pair", 3 | "abstract": "A key/value pair dåtå type", 4 | "description": "This library contains a single PostgreSQL extension, a key/value pair data type called `pair`, along with a convenience function for constructing pairs.", 5 | "version": "0.1.1", 6 | "maintainer": [ 7 | "David E. 
Wheeler " 8 | ], 9 | "date": "2010-10-29T22:46:45Z", 10 | "release_status": "testing", 11 | "user": "theory", 12 | "sha1": "443cbcf678a3c2f479c4c069bcb96054d9b25a32", 13 | "license": "postgresql", 14 | "provides": { 15 | "pair": { 16 | "abstract": "A key/value pair dåtå type", 17 | "file": "sql/pair.sql", 18 | "version": "0.1.1" 19 | } 20 | }, 21 | "tags": ["ordered pair", "pair", "key value"], 22 | "resources": { 23 | "bugtracker": { 24 | "web": "http://github.com/theory/kv-pair/issues/" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "git://github.com/theory/kv-pair.git", 29 | "web": "http://github.com/theory/kv-pair/" 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.1/README.txt: -------------------------------------------------------------------------------- 1 | pair 0.1.0 2 | ========== 3 | 4 | This library contains a single PostgreSQL extension, a key/value pair data 5 | type called `pair`, along with a convenience function for constructing 6 | key/value pairs. It's just a simple thing, really: a two-value composite type 7 | that can store any type of value in its slots, which are named "k" and "v". 8 | 9 | The `pair` data type was created as an inspiration, as documented in 10 | [this blog post](http://justatheory.com/computers/databases/postgresql/key-value-pairs.html). 11 | Give it a read if you're interested in the context of its creation. 12 | 13 | To build it, just do this: 14 | 15 | make 16 | make installcheck 17 | make install 18 | 19 | If you encounter an error such as: 20 | 21 | "Makefile", line 8: Need an operator 22 | 23 | You need to use GNU make, which may well be installed on your system as 24 | `gmake`: 25 | 26 | gmake 27 | gmake install 28 | gmake installcheck 29 | 30 | If you encounter an error such as: 31 | 32 | make: pg_config: Command not found 33 | 34 | Be sure that you have `pg_config` installed and in your path. If you used a 35 | package management system such as RPM to install PostgreSQL, be sure that the 36 | `-devel` package is also installed. If necessary tell the build process where 37 | to find it: 38 | 39 | env PG_CONFIG=/path/to/pg_config make && make installcheck && make install 40 | 41 | And finally, if all that fails (and if you're on PostgreSQL 8.1 or lower, it 42 | likely will), copy the entire distribution directory to the `contrib/` 43 | subdirectory of the PostgreSQL source tree and try it there without 44 | `pg_config`: 45 | 46 | env NO_PGXS=1 make && make installcheck && make install 47 | 48 | If you encounter an error such as: 49 | 50 | ERROR: must be owner of database regression 51 | 52 | You need to run the test suite using a super user, such as the default 53 | "postgres" super user: 54 | 55 | make installcheck PGUSER=postgres 56 | 57 | Dependencies 58 | ------------ 59 | The `pair` date type has no dependencies other than PostgreSQL. 60 | 61 | Copyright and License 62 | --------------------- 63 | 64 | Copyright (c) 2010 David E. Wheeler. 65 | 66 | This module is free software; you can redistribute it and/or modify it under 67 | the [PostgreSQL License](http://www.opensource.org/licenses/postgresql). 68 | 69 | Permission to use, copy, modify, and distribute this software and its 70 | documentation for any purpose, without fee, and without a written agreement is 71 | hereby granted, provided that the above copyright notice and this paragraph 72 | and the following two paragraphs appear in all copies. 73 | 74 | In no event shall David E. 
Wheeler be liable to any party for direct, 75 | indirect, special, incidental, or consequential damages, including lost 76 | profits, arising out of the use of this software and its documentation, even 77 | if David E. Wheeler has been advised of the possibility of such damage. 78 | 79 | David E. Wheeler specifically disclaims any warranties, including, but not 80 | limited to, the implied warranties of merchantability and fitness for a 81 | particular purpose. The software provided hereunder is on an "as is" basis, 82 | and David E. Wheeler has no obligations to provide maintenance, support, 83 | updates, enhancements, or modifications. 84 | 85 | -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.1/pair-0.1.1.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgxn/pgxn-api/57b4cc21fffcd9faf9c8f0663b424993b389bfa6/t/root/dist/pair/0.1.1/pair-0.1.1.zip -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.2/META.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pair", 3 | "abstract": "A key/value pair dåtå type", 4 | "description": "This library contains a single PostgreSQL extension, a key/value pair data type called `pair`, along with a convenience function for constructing pairs.", 5 | "version": "0.1.2", 6 | "maintainer": [ 7 | "David E. Wheeler " 8 | ], 9 | "date": "2010-11-10T12:18:03Z", 10 | "release_status": "stable", 11 | "user": "theory", 12 | "sha1": "cebefd23151b4b797239646f7ae045b03d028fcf", 13 | "license": "postgresql", 14 | "provides": { 15 | "pair": { 16 | "abstract": "A key/value pair dåtå type", 17 | "file": "sql/pair.sql", 18 | "version": "0.1.2" 19 | } 20 | }, 21 | "tags": ["ordered pair", "pair", "key value"], 22 | "resources": { 23 | "bugtracker": { 24 | "web": "http://github.com/theory/kv-pair/issues/" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "git://github.com/theory/kv-pair.git", 29 | "web": "http://github.com/theory/kv-pair/" 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /t/root/dist/pair/0.1.2/pair-0.1.2.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgxn/pgxn-api/57b4cc21fffcd9faf9c8f0663b424993b389bfa6/t/root/dist/pair/0.1.2/pair-0.1.2.zip -------------------------------------------------------------------------------- /t/root/dist/pgTAP.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pgTAP", 3 | "releases": { 4 | "stable": [ 5 | {"version": "0.25.0", "date": "2011-02-02T03:25:17Z"} 6 | ] 7 | } 8 | } -------------------------------------------------------------------------------- /t/root/dist/pgTAP/0.25.0/META.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pgTAP", 3 | "abstract": "Unit testing for PostgreSQL", 4 | "description": "pgTAP is a suite of database functions that make it easy to write TAP-emitting unit tests in psql scripts or xUnit-style test functions.", 5 | "version": "0.25.0", 6 | "maintainer": [ 7 | "David E. 
Wheeler ", 8 | "pgTAP List " 9 | ], 10 | "date": "2011-02-02T03:25:17Z", 11 | "release_status": "stable", 12 | "user": "theory", 13 | "sha1": "f7216d04ec2ca980a2491d8cbf6bed71c970dda9", 14 | "license": { 15 | "PostgreSQL": "http://www.postgresql.org/about/licence" 16 | }, 17 | "prereqs": { 18 | "runtime": { 19 | "recommends": { 20 | "PostgreSQL": "8.4.0" 21 | }, 22 | "requires": { 23 | "PostgreSQL": "8.0.0", 24 | "plpgsql": "0.0.0" 25 | } 26 | } 27 | }, 28 | "provides": { 29 | "pgtap": { 30 | "file": "pgtap.sql", 31 | "abstract": "Unit testing for PostgreSQL", 32 | "version": "0.25.0" 33 | } 34 | }, 35 | "tags": ["testing", "unit testing", "tap", "tddd", "test driven database development"], 36 | "resources": { 37 | "bugtracker": { 38 | "web": "https://github.com/theory/pgtap/issues" 39 | }, 40 | "homepage": "http://pgtap.org/", 41 | "repository": { 42 | "type": "git", 43 | "url": "https://github.com/theory/pgtap.git", 44 | "web": "https://github.com/theory/pgtap" 45 | } 46 | } 47 | } -------------------------------------------------------------------------------- /t/root/extension/pair.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pair", 3 | "latest": "stable", 4 | "stable": { "dist": "pair", "version": "0.1.0", "sha1": "1234567890abcdef1234567890abcdef12345678" }, 5 | "versions": { 6 | "0.1.0": [ 7 | { "dist": "pair", "version": "0.1.0" } 8 | ] 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /t/root/extension/pgtap.json: -------------------------------------------------------------------------------- 1 | { 2 | "extension": "pgtap", 3 | "latest": "stable", 4 | "stable": { "dist": "pgTAP", "version": "0.25.0", "sha1": "f7216d04ec2ca980a2491d8cbf6bed71c970dda9" }, 5 | "versions": { 6 | "0.25.0": [ 7 | { "dist": "pgTAP", "version": "0.25.0" } 8 | ] 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /t/root/index.json: -------------------------------------------------------------------------------- 1 | { 2 | "download": "/dist/{dist}/{version}/{dist}-{version}.zip", 3 | "readme": "/dist/{dist}/{version}/README.txt", 4 | "meta": "/dist/{dist}/{version}/META.json", 5 | "dist": "/dist/{dist}.json", 6 | "extension": "/extension/{extension}.json", 7 | "user": "/user/{user}.json", 8 | "tag": "/tag/{tag}.json", 9 | "stats": "/stats/{stats}.json", 10 | "mirrors": "/meta/mirrors.json", 11 | "spec": "/meta/spec.{format}" 12 | } 13 | -------------------------------------------------------------------------------- /t/root/meta/mirrors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "uri": "http://pgxn.depesz.com/", 4 | "frequency": "every 6 hours", 5 | "location": "Nürnberg, Germany", 6 | "organization": "depesz Software Hubert Lubaczewski", 7 | "timezone": "CEST", 8 | "email": "depesz.com|web_pgxn", 9 | "bandwidth": "100Mbps", 10 | "src": "rsync://master.pgxn.org/pgxn/", 11 | "rsync": "", 12 | "notes": "access via http only" 13 | }, 14 | { 15 | "uri": "http://www.postgres-support.ch/pgxn/", 16 | "frequency": "hourly", 17 | "location": "Basel, Switzerland, Europe", 18 | "organization": "micro systems", 19 | "timezone": "CEST", 20 | "email": "msys.ch|marc", 21 | "bandwidth": "10Mbps", 22 | "src": "rsync://master.pgxn.org/pgxn", 23 | "rsync": "", 24 | "notes": "" 25 | }, 26 | { 27 | "uri": "http://kineticode.com/pgxn/", 28 | "frequency": "hourly", 29 | "location": "Portland, OR, USA", 30 | 
"organization": "Kineticode, Inc.", 31 | "timezone": "America/Los_Angeles", 32 | "email": "kineticode.com|pgxn", 33 | "bandwidth": "10MBps", 34 | "src": "rsync://master.pgxn.org/pgxn/", 35 | "rsync": "", 36 | "notes": "" 37 | }, 38 | { 39 | "uri": "http://pgxn.justatheory.com/", 40 | "frequency": "daily", 41 | "location": "Portland, OR, USA", 42 | "organization": "David E. Wheeler", 43 | "timezone": "America/Los_Angeles", 44 | "email": "justatheory.com|pgxn", 45 | "bandwidth": "Cable", 46 | "src": "rsync://master.pgxn.org/pgxn/", 47 | "rsync": "", 48 | "notes": "" 49 | }, 50 | { 51 | "uri": "http://pgxn.darkixion.com/", 52 | "frequency": "hourly", 53 | "location": "London, UK", 54 | "organization": "Thom Brown", 55 | "timezone": "Europe/London", 56 | "email": "darkixion.com|pgxn", 57 | "bandwidth": "1Gbps", 58 | "src": "rsync://master.pgxn.org/pgxn", 59 | "rsync": "rsync://pgxn.darkixion.com/pgxn", 60 | "notes": "" 61 | }, 62 | { 63 | "uri": "http://mirrors.cat.pdx.edu/pgxn/", 64 | "frequency": "hourly", 65 | "location": "Portland, OR, USA", 66 | "organization": "PSU Computer Action Team", 67 | "timezone": "America/Los_Angeles", 68 | "email": "cat.pdx.edu|support", 69 | "bandwidth": "60Mbsec", 70 | "src": "rsync://master.pgxn.org/pgxn", 71 | "rsync": "rsync://mirrors.cat.pdx.edu/pgxn", 72 | "notes": "I2 and IPv6" 73 | }, 74 | { 75 | "uri": "http://pgxn.dalibo.org/", 76 | "frequency": "hourly", 77 | "location": "Marseille, France", 78 | "organization": "DALIBO SARL", 79 | "timezone": "CEST", 80 | "email": "dalibo.com|contact", 81 | "bandwidth": "100Mbps", 82 | "src": "rsync://master.pgxn.org/pgxn/", 83 | "rsync": "", 84 | "notes": "" 85 | }, 86 | { 87 | "uri": "http://pgxn.cxsoftware.org/", 88 | "frequency": "hourly", 89 | "location": "Seattle, WA, USA", 90 | "organization": "CxNet", 91 | "timezone": "America/Los_Angeles", 92 | "email": "cxnet.cl|cristobal", 93 | "bandwidth": "100Mbps", 94 | "src": "rsync://master.pgxn.org/pgxn/", 95 | "rsync": "", 96 | "notes": "" 97 | } 98 | ] 99 | -------------------------------------------------------------------------------- /t/root/meta/spec.txt: -------------------------------------------------------------------------------- 1 | Name 2 | ==== 3 | 4 | PGXN Meta Spec - The PGXN distribution metadata specification 5 | 6 | -------------------------------------------------------------------------------- /t/root/meta/timestamp: -------------------------------------------------------------------------------- 1 | 1298991184 2 | # Do not delete this file 3 | -------------------------------------------------------------------------------- /t/root/tag/key value.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "key value", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.1", "date": "2010-10-29T22:44:42Z"}, 7 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 8 | ] 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /t/root/tag/ordered pair.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "ordered pair", 3 | "releases": { 4 | "pair": { 5 | "stable": [ 6 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 7 | ] 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /t/root/tag/pair.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "pair", 3 | "releases": { 4 | "pair": { 5 | 
"stable": [ 6 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 7 | ] 8 | }, 9 | "pgTAP": { 10 | "stable": [ 11 | {"version": "0.25.0", "date": "2011-01-22T08:34:51Z"} 12 | ] 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /t/root/user/fred.json: -------------------------------------------------------------------------------- 1 | { 2 | "nickname": "fred", 3 | "name": "Fred Flintstone", 4 | "email": "fred@flintstone.com", 5 | "uri": "http://fred.flintstone.com/", 6 | "twitter": "fred" 7 | } 8 | -------------------------------------------------------------------------------- /t/root/user/theory.json: -------------------------------------------------------------------------------- 1 | { 2 | "nickname": "theory", 3 | "name": "David E. Wheeler", 4 | "email": "david@justatheory.com", 5 | "uri": "http://justatheory.com/", 6 | "releases": { 7 | "pair": { 8 | "stable": [ 9 | {"version": "0.1.0", "date": "2010-10-19T03:59:54Z"} 10 | ] 11 | }, 12 | "pgTAP": { 13 | "stable": [ 14 | {"version": "0.25.0", "date": "2011-02-02T03:25:17Z"} 15 | ] 16 | }, 17 | "semver": { 18 | "stable": [ 19 | {"version": "0.2.0", "date": "2011-02-05T19:31:38Z"} 20 | ] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /t/router.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use v5.14; 4 | use utf8; 5 | BEGIN { $ENV{EMAIL_SENDER_TRANSPORT} = 'Test' } 6 | use Test::More tests => 196; 7 | #use Test::More 'no_plan'; 8 | use Plack::Test; 9 | use Test::MockModule; 10 | use HTTP::Request::Common; 11 | use File::Spec::Functions qw(catdir catfile); 12 | use File::Copy::Recursive qw(dircopy fcopy); 13 | use File::Path qw(remove_tree); 14 | 15 | BEGIN { 16 | $File::Copy::Recursive::KeepMode = 0; 17 | use_ok 'PGXN::API::Router' or die; 18 | } 19 | 20 | # Set up the document root. 21 | my $doc_root = catdir 't', 'test_router_root'; 22 | my $api = PGXN::API->instance; 23 | $api->doc_root($doc_root); 24 | # On MSWin32, this somehow gets run before some tests when running under 25 | # `./Build test`. No idea why, so just avoid it and do it at the end of this 26 | # test and here on test start, in case stuff is left from a previous run. 27 | END { remove_tree $doc_root if $^O ne 'MSWin32' } 28 | remove_tree $doc_root; 29 | dircopy catdir(qw(t root)), $doc_root; 30 | $api->mirror_root(catdir 't', 'root'); 31 | 32 | my $search_mock = Test::MockModule->new('PGXN::API::Searcher'); 33 | my @params; 34 | $search_mock->mock(new => sub { bless {} => shift }); 35 | 36 | { 37 | local $@; 38 | eval { PGXN::API::Router->app }; 39 | is $@, "Missing required parameters errors_to and errors_from\n", 40 | 'Should get proper error for missing parameters'; 41 | } 42 | 43 | my $app = PGXN::API::Router->app( 44 | errors_to => 'alerts@pgxn.org', 45 | errors_from => 'api@pgxn.org', 46 | ); 47 | 48 | # Test the root index.json. 49 | test_psgi $app => sub { 50 | my $cb = shift; 51 | ok my $res = $cb->(GET '/index.json'), 'Fetch /index.json'; 52 | ok $res->is_success, 'It should be a success'; 53 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 54 | 'Should have API version in the header'; 55 | is $res->content_type, 'application/json', 'Should be application/json'; 56 | }; 57 | 58 | # Try a subdirectory JSON file. 
59 | test_psgi $app => sub { 60 | my $cb = shift; 61 | my $uri = '/dist/pair/0.1.1/META.json'; 62 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 63 | ok $res->is_success, 'It should be a success'; 64 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 65 | 'Should have API version in the header'; 66 | is $res->content_type, 'application/json', 'Should be application/json'; 67 | }; 68 | 69 | # Try a JSONP request. 70 | test_psgi $app => sub { 71 | my $cb = shift; 72 | my $uri = '/dist/pair/0.1.1/META.json?callback=foo'; 73 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 74 | ok $res->is_success, 'It should be a success'; 75 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 76 | 'Should have API version in the header'; 77 | is $res->content_type, 'text/javascript', 'Should be text/javascript'; 78 | like $res->content, qr{\A(?:/[*]{2}/)?foo\(}, 'It should look like a JSONP response'; 79 | }; 80 | 81 | # Try a readme file. 82 | test_psgi $app => sub { 83 | my $cb = shift; 84 | my $uri = '/dist/pair/0.1.1/README.txt'; 85 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 86 | ok $res->is_success, 'It should be a success'; 87 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 88 | 'Should have API version in the header'; 89 | is $res->content_type, 'text/plain', 'Should be text/plain'; 90 | is $res->content_charset, 'UTF-8', 'Should be UTF-8'; 91 | }; 92 | 93 | # Try a distribution file. 94 | test_psgi $app => sub { 95 | my $cb = shift; 96 | my $uri = '/dist/pair/0.1.1/pair-0.1.1.zip'; 97 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 98 | ok $res->is_success, 'It should be a success'; 99 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 100 | 'Should have API version in the header'; 101 | is $res->content_type, 'application/zip', 'Should be application/zip'; 102 | }; 103 | 104 | # Try an HTML file. 105 | my $html = catfile qw(lib PGXN API index.html); 106 | test_psgi $app => sub { 107 | my $cb = shift; 108 | fcopy $html, $doc_root or die "Cannot copy $html to $doc_root: $!\n"; 109 | my $uri = '/index.html'; 110 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 111 | ok $res->is_success, 'It should be a success'; 112 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 113 | 'Should have API version in the header'; 114 | is $res->content_type, 'text/html', 'Should be text/html'; 115 | }; 116 | 117 | # Try the root directory. 118 | test_psgi $app => sub { 119 | my $cb = shift; 120 | fcopy $html, $doc_root or die "Cannot copy $html to $doc_root: $!\n"; 121 | ok my $res = $cb->(GET '/'), "Fetch /"; 122 | ok $res->is_success, 'It should be a success'; 123 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 124 | 'Should have API version in the header'; 125 | is $res->content_type, 'text/html', 'Should be text/html'; 126 | }; 127 | 128 | # Create a src directory. 129 | my $src = catdir $doc_root, qw(dist/pair); 130 | my $dst = catdir $doc_root, qw(src pair); 131 | dircopy $src, $dst or die "Cannot copy dir $src to $dst: $!"; 132 | fcopy $html, $dst or die "Cannot copy $html to $doc_root: $!\n"; 133 | 134 | # Try a src/json file. 
135 | test_psgi $app => sub { 136 | my $cb = shift; 137 | my $uri = 'src/pair/0.1.0/META.json'; 138 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 139 | ok $res->is_success, 'It should be a success'; 140 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 141 | 'Should have API version in the header'; 142 | is $res->content_type, 'text/plain', 'Should be text/plain'; 143 | }; 144 | 145 | # Try a src/readme file. 146 | test_psgi $app => sub { 147 | my $cb = shift; 148 | my $uri = 'src/pair/0.1.1/README.txt'; 149 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 150 | ok $res->is_success, 'It should be a success'; 151 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 152 | 'Should have API version in the header'; 153 | is $res->content_type, 'text/plain', 'Should be text/plain'; 154 | }; 155 | 156 | # Try a src/html file. 157 | test_psgi $app => sub { 158 | my $cb = shift; 159 | my $uri = 'src/pair/index.html'; 160 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 161 | ok $res->is_success, 'It should be a success'; 162 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 163 | 'Should have API version in the header'; 164 | is $res->content_type, 'text/plain', 'Should be text/plain'; 165 | }; 166 | 167 | # Try a src directory. 168 | test_psgi $app => sub { 169 | my $cb = shift; 170 | my $uri = 'src/pair/'; 171 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 172 | ok $res->is_success, 'It should be a success'; 173 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 174 | 'Should have API version in the header'; 175 | is $res->content_type, 'text/html', 'Should be text/html'; 176 | like $res->content, qr/Parent Directory/, 177 | 'Should look like a directory listing'; 178 | }; 179 | 180 | # Make sure /_index always 404s. 181 | test_psgi $app => sub { 182 | my $cb = shift; 183 | for my $uri (qw( _index _index/ _index/foo _index/index.html)) { 184 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 185 | ok $res->is_error, "$uri should respond with an error"; 186 | is $res->code, 404, "$uri should 404"; 187 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 188 | 'Should have API version in the header'; 189 | } 190 | }; 191 | 192 | # Give the search engine a spin. 193 | test_psgi $app => sub { 194 | my $cb = shift; 195 | $search_mock->mock(search => sub { 196 | shift; @params = @_; return { foo => 1 } 197 | }); 198 | my $q = 'q=whü&o=2&l=10'; 199 | my @exp = ( query => 'whü', offset => 2, limit => 10 ); 200 | for my $in (qw(docs dists extensions users tags)) { 201 | for my $slash ('', '/') { 202 | my $uri = "/search/$in$slash?$q"; 203 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 204 | ok $res->is_success, "$uri should return success"; 205 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 206 | 'Should have API version in the header'; 207 | is $res->content_type, 'application/json', 'Should be application/json'; 208 | is $res->content, '{"foo":1}', 'Content should be JSON of results'; 209 | is_deeply \@params, [in => $in, @exp], 210 | "$uri should properly dispatch to the searcher"; 211 | } 212 | } 213 | 214 | # Try a JSONP request.
215 | my $uri = "/search/docs?q=foo&callback=bar"; 216 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 217 | ok $res->is_success, "$uri should return success"; 218 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 219 | 'Should have API version in the header'; 220 | is $res->content_type, 'text/javascript', 221 | 'Should be text/javascript'; 222 | like $res->content, qr{(?:/[*]{2}/)?\Qbar({"foo":1})}, 223 | 'Content should be JSONP of results'; 224 | 225 | # Now make sure we get the proper 404s. 226 | for my $uri (qw( 227 | /search 228 | /search/foo 229 | /search/foo/ 230 | /search/tag/foo 231 | /search/tag/foo/ 232 | )) { 233 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 234 | ok $res->is_error, "$uri should respond with an error"; 235 | is $res->code, 404, "$uri should 404"; 236 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 237 | 'Should have API version in the header'; 238 | } 239 | 240 | # And that we get a 400 when there's no q param. 241 | $uri = '/search/docs'; 242 | ok $res = $cb->(GET $uri), "Fetch $uri"; 243 | ok $res->is_error, "$uri should respond with an error"; 244 | is $res->code, 400, "$uri should 400"; 245 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 246 | 'Should have API version in the header'; 247 | is $res->content, 'Bad request: Invalid or missing "q" query param.', 248 | 'Should get proper error message'; 249 | 250 | # And that we get a 400 for an invalid q param. 251 | for my $q ('', '*', '?') { 252 | my $uri = "/search/docs?q=$q"; 253 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 254 | ok $res->is_error, "$uri should respond with an error"; 255 | is $res->code, 400, "$uri should 400"; 256 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 257 | 'Should have API version in the header'; 258 | is $res->content, 'Bad request: Invalid or missing "q" query param.', 259 | 'Should get proper error message'; 260 | } 261 | 262 | # And that we get a 400 for invalid params. 263 | for my $spec ( 264 | ['l=foo' => 'Bad request: invalid "l" query param.'], 265 | ['o=foo' => 'Bad request: invalid "o" query param.'], 266 | ) { 267 | my $uri = "/search/docs?q=whu&$spec->[0]"; 268 | ok my $res = $cb->(GET $uri), "Fetch $uri"; 269 | ok $res->is_error, "$uri should respond with an error"; 270 | is $res->code, 400, "$uri should 400"; 271 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 272 | 'Should have API version in the header'; 273 | is $res->content, $spec->[1], 'Should get proper error message'; 274 | } 275 | 276 | # Make sure it works with a query and nothing else. 277 | $uri .= '?q=hi'; 278 | ok $res = $cb->(GET $uri), "Fetch $uri"; 279 | ok $res->is_success, "$uri should return success"; 280 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 281 | 'Should have API version in the header'; 282 | is $res->content, '{"foo":1}', 'Content should be JSON of results'; 283 | is_deeply \@params, 284 | [in => 'docs', query => 'hi', offset => undef, limit => undef ], 285 | "$uri should properly dispatch to the searcher"; 286 | 287 | $search_mock->unmock('search'); 288 | }; 289 | 290 | # Test /error basics.
291 | my $err_app = sub { 292 | my $env = shift; 293 | $env->{'psgix.errordocument.PATH_INFO'} = '/what'; 294 | $env->{'psgix.errordocument.SCRIPT_NAME'} = ''; 295 | $env->{'psgix.errordocument.SCRIPT_NAME'} = ''; 296 | $env->{'psgix.errordocument.HTTP_HOST'} = 'localhost'; 297 | $env->{'plack.stacktrace.text'} = 'This is the trace'; 298 | $app->($env); 299 | }; 300 | 301 | test_psgi $err_app => sub { 302 | my $cb = shift; 303 | ok my $res = $cb->(GET '/error'), "GET /error"; 304 | ok $res->is_success, q{Should be success (because it's only served as a subrequest)}; 305 | is $res->header('X-PGXN-API-Version'), PGXN::API->version_string, 306 | 'Should have API version in the header'; 307 | is $res->content, 'internal server error', 'body should be error message'; 308 | 309 | # Check the alert email. 310 | ok my @deliveries = Email::Sender::Simple->default_transport->deliveries, 311 | 'Should have email deliveries.'; 312 | is @deliveries, 1, 'Should have one message'; 313 | is @{ $deliveries[0]->{successes} }, 1, 'Should have been successfully delivered'; 314 | 315 | my $email = $deliveries[0]{email}; 316 | is $email->get_header('Subject'), 'PGXN API Internal Server Error', 317 | 'The subject should be set'; 318 | is $email->get_header('From'), 'api@pgxn.org', 319 | 'From header should be set'; 320 | is $email->get_header('To'), 'alerts@pgxn.org', 321 | 'To header should be set'; 322 | is $email->get_body, 'An error occurred during a request to http://localhost/what. 323 | 324 | Environment: 325 | 326 | { HTTP_HOST => "localhost", PATH_INFO => "/what", SCRIPT_NAME => "" } 327 | 328 | Trace: 329 | 330 | This is the trace 331 | ', 332 | 'The body should be correct'; 333 | Email::Sender::Simple->default_transport->clear_deliveries; 334 | }; 335 | 336 | # Just in case (not done above on MSWin32). 337 | END { remove_tree $doc_root } 338 | -------------------------------------------------------------------------------- /t/sync.t: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl -w 2 | 3 | use strict; 4 | use warnings; 5 | use Test::More tests => 65; 6 | # use Test::More 'no_plan'; 7 | use File::Spec::Functions qw(catfile catdir tmpdir); 8 | use Test::MockModule; 9 | use Test::Output; 10 | use File::Path qw(remove_tree); 11 | use File::Copy::Recursive qw(dircopy fcopy); 12 | use Test::File; 13 | use Fcntl qw(:mode); 14 | 15 | my $CLASS; 16 | BEGIN { 17 | $CLASS = 'PGXN::API::Sync'; 18 | use_ok $CLASS or die; 19 | } 20 | 21 | can_ok $CLASS => qw( 22 | new 23 | run 24 | rsync_path 25 | log_file 26 | run_rsync 27 | update_index 28 | validate_distribution 29 | download_for 30 | digest_for 31 | unzip 32 | ); 33 | 34 | my $pgxn = PGXN::API->instance; 35 | $pgxn->doc_root(catdir 't', 'test_sync_root'); 36 | END { remove_tree $pgxn->doc_root } 37 | 38 | ############################################################################## 39 | # Test rsync. 40 | my $attr = $CLASS->meta->get_attribute('rsync_path'); 41 | is $attr->default, 'rsync', 'Default rsync_path should be "rsync"'; 42 | 43 | my $rsync = catfile qw(t bin), 'testrsync' . (PGXN::API::Sync::WIN32 ? 
'.bat' : ''); 44 | ok my $sync = $CLASS->new( 45 | source => 'rsync://localhost/pgxn', 46 | rsync_path => $rsync, 47 | ), "Construct $CLASS object"; 48 | 49 | my $rsync_out = catfile qw(t data rsync.out); 50 | my $mirror_root = $pgxn->mirror_root; 51 | my $log_file = $sync->log_file; 52 | is $log_file, catfile(tmpdir, "pgxn-api-sync-$$.txt"), 53 | 'Log file name should include PID'; 54 | $sync->log_file($rsync_out); 55 | is $sync->log_file, $rsync_out, 'Should have updated log_file'; 56 | 57 | END { 58 | unlink 'test.tmp'; # written by testrsync 59 | $sync->log_file(''); # Prevent deleting fixtures 60 | } 61 | 62 | ok $sync->run_rsync, 'Run rsync'; 63 | is do { 64 | open my $fh, '<', 'test.tmp' or die "Cannot open test.tmp: $!\n"; 65 | local $/; 66 | <$fh> 67 | }, "--archive 68 | --compress 69 | --delete 70 | --quiet 71 | --log-file-format 72 | %i %n 73 | --log-file 74 | $rsync_out 75 | rsync://localhost/pgxn 76 | $mirror_root 77 | ", 'Rsync should have been properly called'; 78 | 79 | # Rsync our "mirror" to the mirror root. 80 | remove_tree $mirror_root; 81 | dircopy catdir(qw(t root)), $mirror_root; 82 | fcopy catfile(qw(t root index.json)), $pgxn->doc_root; 83 | 84 | ############################################################################## 85 | # Test the regular expression for finding distributions. 86 | my @rsync_out = do { 87 | open my $fh, '<', $rsync_out or die "Cannot open $rsync_out: $!\n"; 88 | <$fh>; 89 | }; 90 | 91 | my $sep = catfile '', ''; 92 | if ($sep ne '/') { 93 | for (@rsync_out) { 94 | my @parts = split /\]/; 95 | $parts[-1] =~ s{/}{$sep}g; 96 | $_ = join ']', @parts; 97 | } 98 | } 99 | 100 | # Test the dist template regex. 101 | ok my $regex = $sync->regex_for_uri_template('download'), 102 | 'Get distribution regex'; 103 | my @found; 104 | for (@rsync_out) { 105 | push @found => $1 if $_ =~ $regex; 106 | } 107 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 108 | is_deeply \@found, [qw( 109 | dist/pair/0.1.0/pair-0.1.0.zip 110 | dist/pair/0.1.1/pair-0.1.1.zip 111 | dist/pg_french_datatypes/0.1.0/pg_french_datatypes-0.1.0.zip 112 | dist/pg_french_datatypes/0.1.1/pg_french_datatypes-0.1.1.zip 113 | dist/tinyint/0.1.0/tinyint-0.1.0.zip 114 | )], 'It should recognize the distribution files.'; 115 | 116 | # Test the meta template regex. 117 | ok $regex = $sync->regex_for_uri_template('meta'), 118 | 'Get meta regex'; 119 | @found = (); 120 | for (@rsync_out) { 121 | push @found => $1 if $_ =~ $regex; 122 | } 123 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 124 | 125 | is_deeply \@found, [qw( 126 | dist/pair/0.1.0/META.json 127 | dist/pair/0.1.1/META.json 128 | dist/pg_french_datatypes/0.1.0/META.json 129 | dist/pg_french_datatypes/0.1.1/META.json 130 | dist/tinyint/0.1.0/META.json 131 | )], 'It should recognize the meta files.'; 132 | 133 | # Test the user template regex. 134 | ok $regex = $sync->regex_for_uri_template('user'), 135 | 'Get user regex'; 136 | @found = (); 137 | for (@rsync_out) { 138 | push @found => $1 if $_ =~ $regex; 139 | } 140 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 141 | 142 | is_deeply \@found, [qw( 143 | user/daamien.json 144 | user/theory.json 145 | user/umitanuki.json 146 | )], 'It should recognize the user files.'; 147 | 148 | # Test the extension template regex. 
149 | ok $regex = $sync->regex_for_uri_template('extension'), 150 | 'Get extension regex'; 151 | @found = (); 152 | for (@rsync_out) { 153 | push @found => $1 if $_ =~ $regex; 154 | } 155 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 156 | 157 | is_deeply \@found, [qw( 158 | extension/pair.json 159 | extension/pg_french_datatypes.json 160 | extension/tinyint.json 161 | )], 'It should recognize the extension files.'; 162 | 163 | # Test the tag template regex. 164 | ok $regex = $sync->regex_for_uri_template('tag'), 165 | 'Get tag regex'; 166 | @found = (); 167 | for (@rsync_out) { 168 | push @found => $1 if $_ =~ $regex; 169 | } 170 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 171 | 172 | is_deeply \@found, [ 173 | "tag/data types.json", 174 | "tag/france.json", 175 | "tag/key value pair.json", 176 | "tag/key value.json", 177 | "tag/ordered pair.json", 178 | "tag/pair.json", 179 | "tag/variadic function.json", 180 | ], 'It should recognize the tag files.'; 181 | 182 | # Test the mirrors template regex. 183 | ok $regex = $sync->regex_for_uri_template('mirrors'), 184 | 'Get mirrors regex'; 185 | @found = (); 186 | for (@rsync_out) { 187 | push @found => $1 if $_ =~ $regex; 188 | } 189 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 190 | is_deeply \@found, ['meta/mirrors.json'], 'Should find mirrors.json'; 191 | 192 | # Test the stats template regex. 193 | ok $regex = $sync->regex_for_uri_template('stats'), 194 | 'Get stats regex'; 195 | @found = (); 196 | for (@rsync_out) { 197 | push @found => $1 if $_ =~ $regex; 198 | } 199 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 200 | is_deeply \@found, [qw( 201 | stats/dist.json 202 | stats/extension.json 203 | stats/user.json 204 | stats/tag.json 205 | stats/summary.json 206 | )], 'Should find stats JSON files'; 207 | 208 | # Test the spec template regex. 209 | ok $regex = $sync->regex_for_uri_template('spec'), 210 | 'Get spec regex'; 211 | @found = (); 212 | for (@rsync_out) { 213 | push @found => $1 if $_ =~ $regex; 214 | } 215 | if ($sep ne '/') { s{\Q$sep}{/}g for @found } 216 | is_deeply \@found, ['meta/spec.txt'], 'Should find spec.txt'; 217 | 218 | ############################################################################## 219 | # Reset the rsync output and have it do its thing. 
220 | my $mock = Test::MockModule->new($CLASS); 221 | $mock->mock(validate_distribution => sub { push @found => $_[1]; $_[1] }); 222 | @found = (); 223 | 224 | my $api_mock = Test::MockModule->new('PGXN::API'); 225 | $api_mock->mock(uri_templates => sub { 226 | fail 'Should not get URI templates before updating the mirror meta'; 227 | }); 228 | 229 | my $idx_mock = Test::MockModule->new('PGXN::API::Indexer'); 230 | my @dists; 231 | $idx_mock->mock(add_distribution => sub { push @dists => $_[1] }); 232 | my (@paths, @meths, @parsed, @users); 233 | $idx_mock->mock(update_root_json => sub { push @meths => 'update_root_json' }); 234 | $idx_mock->mock(finalize => sub { push @meths => 'finalize' }); 235 | $idx_mock->mock(copy_from_mirror => sub { push @paths => $_[1] }); 236 | $idx_mock->mock(parse_from_mirror => sub { shift; push @parsed => \@_ }); 237 | $idx_mock->mock(merge_user => sub { push @users => $_[1] }); 238 | $idx_mock->mock(update_mirror_meta => sub { 239 | $api_mock->unmock_all; 240 | pass 'Should update mirror meta'; 241 | }); 242 | 243 | if ($sep ne '/') { 244 | my $tmplog = catfile tmpdir, "pgxnapi-testlog$$.txt"; 245 | open my $fh, '>', $tmplog or die "Cannot open $tmplog: $!\n"; 246 | print $fh @rsync_out; 247 | close $fh; 248 | END { unlink $tmplog if $tmplog } 249 | $sync->log_file($tmplog); 250 | } 251 | 252 | ok $sync->update_index, 'Update the index'; 253 | is_deeply \@meths, [qw(update_root_json finalize)], 254 | 'The root index.json should have been updated and the update finalized'; 255 | if ($sep ne '/') { s{\Q$sep}{/}g for @found, @dists, @paths } 256 | 257 | is_deeply \@found, [qw( 258 | dist/pair/0.1.0/META.json 259 | dist/pair/0.1.1/META.json 260 | dist/pg_french_datatypes/0.1.0/META.json 261 | dist/pg_french_datatypes/0.1.1/META.json 262 | dist/tinyint/0.1.0/META.json 263 | )], 'It should have processed the meta files'; 264 | is_deeply \@dists, \@found, 'And it should have passed them to the indexer'; 265 | is_deeply \@paths, [qw( 266 | meta/mirrors.json 267 | meta/spec.txt 268 | stats/dist.json 269 | stats/extension.json 270 | stats/user.json 271 | stats/tag.json 272 | stats/summary.json 273 | )], 'And it should have found and copied mirrors, spec, and stats'; 274 | is_deeply \@parsed, [["meta${sep}spec.txt", 'Multimarkdown']], 275 | 'And it should have parsed spec.txt'; 276 | is_deeply \@users, [qw( 277 | daamien 278 | theory 279 | umitanuki 280 | )], 'And it should have merged all user.json files'; 281 | 282 | ############################################################################## 283 | # digest_for() 284 | my $pgz = catfile qw(dist pair 0.1.1 pair-0.1.1.zip); 285 | is $sync->digest_for($pgz), '443cbcf678a3c2f479c4c069bcb96054d9b25a32', 286 | 'Should get expected digest from digest_for()'; 287 | 288 | ############################################################################## 289 | # Test validate_distribution(). 290 | $mock->unmock('validate_distribution'); 291 | 292 | my $json = catfile qw(dist pair 0.1.1 META.json); 293 | $mock->mock(unzip => sub { 294 | is $_[1], $pgz, "unzip should be passed $pgz"; 295 | }); 296 | ok $sync->validate_distribution($json), "Process $json"; 297 | 298 | # It should fail for an invalid checksum. 
299 | CHECKSUM: { 300 | $mock->mock(unzip => sub { 301 | fail 'unzip should not be called when checksum fails' 302 | }); 303 | my $json = catfile qw(dist pair 0.1.0 META.json); 304 | my $pgz = catfile qw(dist pair 0.1.0 pair-0.1.0.zip); 305 | my $pgzp = catfile $pgxn->mirror_root, $pgz; 306 | stderr_is { $sync->validate_distribution($json) } 307 | "Checksum verification failed for $pgzp\n", 308 | 'Should get warning when checksum fails.'; 309 | $mock->unmock('unzip'); 310 | } 311 | 312 | ############################################################################## 313 | # Test unzip. 314 | my @files = (qw( 315 | Changes 316 | META.json 317 | Makefile 318 | README.md 319 | ), catfile(qw(doc pair.md)), 320 | catfile(qw(sql pair.sql)), 321 | catfile(qw(sql uninstall_pair.sql)), 322 | catfile(qw(test sql base.sql)), 323 | catfile(qw(test expected base.out)), 324 | ); 325 | 326 | my @dirs = ( 327 | 'test', 328 | catfile(qw(test expected)), 329 | catfile(qw(test sql)), 330 | 'doc', 331 | 'sql', 332 | ); 333 | 334 | my $src_dir = PGXN::API->instance->source_dir; 335 | my $base = catdir $src_dir, 'pair', 'pair-0.1.1'; 336 | file_not_exists_ok $base, "pair-0.1.1 directory should not exist"; 337 | 338 | # Unzip it. 339 | ok my $zip = $sync->unzip($pgz, {name => 'pair'}), "Unzip $pgz"; 340 | isa_ok $zip, 'Archive::Zip'; 341 | file_exists_ok catfile($base, $_), "$_ should now exist" for @files; 342 | my $readall = S_IRUSR | S_IRGRP | S_IROTH; 343 | my $dirall = S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH; 344 | ok( 345 | ((stat $base)[2] & $dirall) == $dirall, 346 | 'Root directory should be writeable by owner and readable and executable by all' 347 | ); 348 | ok( 349 | ((stat catfile $base, $_)[2] & $dirall) == $dirall, 350 | "Directory $_ should be writeable by owner and readable and executable by all" 351 | ) for @dirs; 352 | ok( 353 | ((stat catfile $base, $_)[2] & $readall) == $readall, 354 | "File $_ should be readable by all", 355 | ) for @files; 356 | 357 | # Now try a brokenated zip file. 358 | stderr_like { $sync->unzip($json) } 359 | qr/format error: can't find EOCD signature/, 360 | 'Should get a warning for an invalid zip file'; 361 | 362 | ############################################################################## 363 | # Make sure each distribution is indexed. 364 | my @distros; 365 | $idx_mock->mock(add_distribution => sub { push @distros => $_[1] }); 366 | 367 | my @valids = qw(foo bar baz); 368 | $mock->mock(validate_distribution => sub { shift @valids }); 369 | 370 | ok $sync->update_index, 'Update the index'; 371 | 372 | is_deeply \@distros, [qw(foo bar baz)], 373 | 'The distributions should have been passed to an indexer'; 374 | --------------------------------------------------------------------------------