├── .circleci
│   └── config.yml
├── .flake8
├── .gitattributes
├── .gitignore
├── .readthedocs.yml
├── .stickler.yml
├── AUTHORS.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── MANIFEST.in
├── README.rst
├── ci
│   └── requirements-conda.txt
├── codecov.yml
├── docs
│   ├── README.rst
│   ├── requirements-docs.txt
│   └── source
│       ├── Makefile
│       ├── _static
│       │   ├── js
│       │   │   └── authcodescripts.js
│       │   └── style.css
│       ├── _templates
│       │   └── layout.html
│       ├── api.rst
│       ├── changelog.rst
│       ├── cli.rst
│       ├── conf.py
│       ├── contributing.rst
│       ├── index.rst
│       ├── install.rst
│       ├── intro.rst
│       ├── oauth.rst
│       └── privacy.rst
├── noxfile.py
├── pydata_google_auth
│   ├── __init__.py
│   ├── __main__.py
│   ├── _version.py
│   ├── _webserver.py
│   ├── auth.py
│   ├── cache.py
│   └── exceptions.py
├── release-procedure.md
├── requirements-dev.txt
├── requirements.txt
├── setup.cfg
├── setup.py
├── testing
│   ├── .gitignore
│   ├── constraints-3.10.txt
│   ├── constraints-3.11.txt
│   ├── constraints-3.12.txt
│   ├── constraints-3.13.txt
│   ├── constraints-3.14.txt
│   ├── constraints-3.15.txt
│   └── constraints-3.9.txt
├── tests
│   ├── __init__.py
│   ├── system
│   │   ├── __init__.py
│   │   └── test_auth.py
│   └── unit
│       ├── __init__.py
│       ├── test_auth.py
│       ├── test_cache.py
│       └── test_webserver.py
└── versioneer.py
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # Python CircleCI 2.0 configuration file
2 | #
3 | # Check https://circleci.com/docs/2.0/language-python/ for more details
4 | #
5 | version: 2
6 | jobs:
7 |   "unit-2.7":
8 |     docker:
9 |       - image: thekevjames/nox
10 |     steps:
11 |       - checkout
12 |       - run: nox -s unit-2.7
13 |
14 |   "unit-3.6":
15 |     docker:
16 |       - image: thekevjames/nox
17 |     steps:
18 |       - checkout
19 |       - run: nox -s unit-3.6
20 |
21 |   "unit-3.7":
22 |     docker:
23 |       - image: thekevjames/nox
24 |     steps:
25 |       - checkout
26 |       - run: nox -s unit-3.7
27 |
28 |   "unit-3.8":
29 |     docker:
30 |       - image: thekevjames/nox
31 |     steps:
32 |       - checkout
33 |       - run: nox -s unit-3.8
34 |
35 |   "unit-3.9":
36 |     docker:
37 |       - image: thekevjames/nox
38 |     steps:
39 |       - checkout
40 |       - run: nox -s unit-3.9 cover
41 |
42 |   "unit-3.10":
43 |     docker:
44 |       - image: thekevjames/nox
45 |     steps:
46 |       - checkout
47 |       - run: nox -s unit-3.10 cover
48 |
49 |   "unit-3.11":
50 |     docker:
51 |       - image: thekevjames/nox
52 |     steps:
53 |       - checkout
54 |       - run: nox -s unit-3.11 cover
55 |
56 |   "unit-3.12":
57 |     docker:
58 |       - image: thekevjames/nox
59 |     steps:
60 |       - checkout
61 |       - run: nox -s unit-3.12 cover
62 |
63 |   "unit-3.13":
64 |     docker:
65 |       - image: thekevjames/nox
66 |     steps:
67 |       - checkout
68 |       - run: nox -s unit-3.13 cover
69 |
70 |   "lint":
71 |     docker:
72 |       - image: thekevjames/nox
73 |     steps:
74 |       - checkout
75 |       - run: nox -s lint
76 |
77 | workflows:
78 |   version: 2
79 |   build:
80 |     jobs:
81 |       - "unit-2.7"
82 |       - "unit-3.6"
83 |       - "unit-3.7"
84 |       - "unit-3.8"
85 |       - "unit-3.9"
86 |       - "unit-3.10"
87 |       - "unit-3.11"
88 |       - "unit-3.12"
89 |       - "unit-3.13"
90 |       - lint
91 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E203, E231, E266, E501, W503
3 | exclude =
4 |     # Standard linting exemptions.
5 |     __pycache__,
6 |     .git,
7 |     *.pyc,
8 |     conf.py
9 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | pydata_google_auth/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[cod]
2 | *.sw[op]
3 |
4 | # C extensions
5 | *.so
6 |
7 | # Packages
8 | *.egg
9 | *.egg-info
10 | dist
11 | build
12 | eggs
13 | .eggs
14 | parts
15 | bin
16 | var
17 | sdist
18 | develop-eggs
19 | .installed.cfg
20 | lib
21 | lib64
22 | __pycache__
23 |
24 | # Installer logs
25 | pip-log.txt
26 |
27 | # Unit test / coverage reports
28 | .coverage
29 | .nox
30 | .cache
31 | .pytest_cache
32 |
33 |
34 | # Mac
35 | .DS_Store
36 |
37 | # JetBrains
38 | .idea
39 |
40 | # VS Code
41 | .vscode
42 |
43 | # emacs
44 | *~
45 |
46 | # Built documentation
47 | docs/_build
48 | docs/source/_build
49 | bigquery/docs/generated
50 | docs.metadata
51 |
52 | # Virtual environment
53 | env/
54 |
55 | # Test logs
56 | coverage.xml
57 | *sponge_log.xml
58 |
59 | # System test environment variables.
60 | system_tests/local_test_setup
61 |
62 | # Make sure a generated file isn't accidentally committed.
63 | pylintrc
64 | pylintrc.test
65 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 |   os: ubuntu-24.04
5 |   tools:
6 |     python: "3.10"
7 |
8 | python:
9 |   install:
10 |     - requirements: docs/requirements-docs.txt
11 |     # Install our python package before building the docs
12 |     - method: pip
13 |       path: .
14 |
15 | sphinx:
16 |   configuration: docs/source/conf.py
17 |
--------------------------------------------------------------------------------
/.stickler.yml:
--------------------------------------------------------------------------------
1 | linters:
2 |   black:
3 |     config: ./pyproject.toml
4 |     fixer: true
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | About the Copyright Holders
2 | ===========================
3 |
4 | * Copyright (c) 2008-2011 AQR Capital Management, LLC
5 |
6 | AQR Capital Management began pandas development in 2008. Development was
7 | led by Wes McKinney. AQR released the source under this license in 2009.
8 | * Copyright (c) 2011-2012, Lambda Foundry, Inc.
9 |
10 | Wes is now an employee of Lambda Foundry, and remains the pandas project
11 | lead.
12 | * Copyright (c) 2011-2012, PyData Development Team
13 |
14 | The PyData Development Team is the collection of developers of the PyData
15 | project. This includes all of the PyData sub-projects, including pandas. The
16 | core team that coordinates development on GitHub can be found here:
17 | http://github.com/pydata.
18 |
19 | Full credits for pandas contributors can be found in the documentation.
20 |
21 | Our Copyright Policy
22 | ====================
23 |
24 | PyData uses a shared copyright model. Each contributor maintains copyright
25 | over their contributions to PyData. However, it is important to note that
26 | these contributions are typically only changes to the repositories. Thus,
27 | the PyData source code, in its entirety, is not the copyright of any single
28 | person or institution. Instead, it is the collective copyright of the
29 | entire PyData Development Team. If individual contributors want to maintain
30 | a record of what changes/contributions they have specific copyright on,
31 | they should indicate their copyright in the commit message of the change
32 | when they commit the change to one of the PyData repositories.
33 |
34 | With this in mind, the following banner should be used in any source code
35 | file to indicate the copyright and license terms:
36 |
37 | ```
38 | #-----------------------------------------------------------------------------
39 | # Copyright (c) 2012, PyData Development Team
40 | # All rights reserved.
41 | #
42 | # Distributed under the terms of the BSD Simplified License.
43 | #
44 | # The full license is in the LICENSE file, distributed with this software.
45 | #-----------------------------------------------------------------------------
46 | ```
47 |
48 | Other licenses can be found in the LICENSES directory.
49 |
50 | License
51 | =======
52 |
53 | pandas is distributed under a 3-clause ("Simplified" or "New") BSD
54 | license. Parts of NumPy, SciPy, numpydoc, bottleneck, which all have
55 | BSD-compatible licenses, are included. Their licenses follow the pandas
56 | license.
57 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | See the [contributing guide in the pydata-google-auth
4 | docs](http://pydata-google-auth.readthedocs.io/en/latest/contributing.html).
5 |
6 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2008-2012, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include MANIFEST.in
2 | include README.rst
3 | include LICENSE.txt
4 | include setup.py
5 |
6 | graft pydata_google_auth
7 |
8 | global-exclude *.so
9 | global-exclude *.pyd
10 | global-exclude *.pyc
11 | global-exclude *~
12 | global-exclude \#*
13 | global-exclude .git*
14 | global-exclude .DS_Store
15 | global-exclude *.png
16 |
17 | include versioneer.py
18 | include pydata_google_auth/_version.py
19 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | pydata-google-auth
2 | ==================
3 |
4 | |Build Status| |Version Status| |Coverage Status|
5 |
6 | **pydata-google-auth** is a package providing helpers for authenticating to Google APIs.
7 |
8 |
9 | Installation
10 | ------------
11 |
12 |
13 | Install latest release version via conda
14 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
15 |
16 | .. code-block:: shell
17 |
18 |    $ conda install pydata-google-auth --channel conda-forge
19 |
20 | Install latest release version via pip
21 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
22 |
23 | .. code-block:: shell
24 |
25 |    $ pip install pydata-google-auth
26 |
27 | Install latest development version
28 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
29 |
30 | .. code-block:: shell
31 |
32 |    $ pip install git+https://github.com/pydata/pydata-google-auth.git
33 |
34 |
35 | Usage
36 | -----
37 |
38 | Use the ``pydata_google_auth.get_user_credentials()`` function to
39 | authenticate to Google APIs with user credentials.
40 |
41 | .. code-block:: python
42 |
43 |    import pydata_google_auth
44 |    credentials = pydata_google_auth.get_user_credentials(
45 |        ['https://www.googleapis.com/auth/cloud-platform'],
46 |    )
47 |
48 |    # Use the credentials in other libraries, such as the Google BigQuery
49 |    # client library.
50 |    from google.cloud import bigquery
51 |    client = bigquery.Client(project='YOUR-PROJECT-ID', credentials=credentials)
52 |
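The ``pydata_google_auth.default()`` function (listed in the API reference) returns a
``(credentials, project)`` pair, as the contributing guide's example shows. A minimal
sketch, assuming ``default()`` accepts a scopes list as its first argument just like
``get_user_credentials()``:

.. code-block:: python

    import pydata_google_auth

    # Sketch: request credentials plus an associated project ID in one call.
    # The exact keyword arguments are documented in the API reference.
    credentials, project = pydata_google_auth.default(
        ['https://www.googleapis.com/auth/cloud-platform'],
    )
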
53 | See the `pydata-google-auth documentation <https://pydata-google-auth.readthedocs.io/>`_ for more details.
54 |
55 | .. |Build Status| image:: https://circleci.com/gh/pydata/pydata-google-auth/tree/master.svg?style=svg
56 |    :target: https://circleci.com/gh/pydata/pydata-google-auth/tree/master
57 | .. |Version Status| image:: https://img.shields.io/pypi/v/pydata-google-auth.svg
58 |    :target: https://pypi.python.org/pypi/pydata-google-auth/
59 | .. |Coverage Status| image:: https://img.shields.io/codecov/c/github/pydata/pydata-google-auth.svg
60 |    :target: https://codecov.io/gh/pydata/pydata-google-auth/
61 |
--------------------------------------------------------------------------------
/ci/requirements-conda.txt:
--------------------------------------------------------------------------------
1 | setuptools
2 | google-auth
3 | google-auth-oauthlib
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 |   status:
3 |     project:
4 |       default:
5 |         target: '0'
6 |         enabled: no
7 |     patch:
8 |       default:
9 |         enabled: no
10 |         target: '50'
11 |         branches: null
12 |
--------------------------------------------------------------------------------
/docs/README.rst:
--------------------------------------------------------------------------------
1 | To build a local copy of the pydata-google-auth docs, install the programs in
2 | requirements-docs.txt and run 'make html'. If you use the conda package manager
3 | these commands suffice::
4 |
5 |     git clone git@github.com:pydata/pydata-google-auth.git
6 |     cd pydata-google-auth/docs/source
7 |     conda create -n pydata-google-auth-docs --file ../requirements-docs.txt
8 |     source activate pydata-google-auth-docs
9 |     make html
10 |     open _build/html/index.html
11 |
--------------------------------------------------------------------------------
/docs/requirements-docs.txt:
--------------------------------------------------------------------------------
1 | # We need to pin to specific versions of the `sphinxcontrib-*` packages
2 | # which still support sphinx 4.x.
3 | # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
4 | # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
5 | sphinxcontrib-applehelp==1.0.4
6 | sphinxcontrib-devhelp==1.0.2
7 | sphinxcontrib-htmlhelp==2.0.1
8 | sphinxcontrib-qthelp==1.0.3
9 | sphinxcontrib-serializinghtml==1.1.5
10 | sphinx==4.5.0
11 | alabaster
12 | recommonmark
--------------------------------------------------------------------------------
/docs/source/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help
18 | help:
19 | @echo "Please use \`make ' where is one of"
20 | @echo " html to make standalone HTML files"
21 | @echo " dirhtml to make HTML files named index.html in directories"
22 | @echo " singlehtml to make a single large HTML file"
23 | @echo " pickle to make pickle files"
24 | @echo " json to make JSON files"
25 | @echo " htmlhelp to make HTML files and a HTML help project"
26 | @echo " qthelp to make HTML files and a qthelp project"
27 | @echo " applehelp to make an Apple Help Book"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " epub3 to make an epub3"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | @echo " text to make text files"
35 | @echo " man to make manual pages"
36 | @echo " texinfo to make Texinfo files"
37 | @echo " info to make Texinfo files and run them through makeinfo"
38 | @echo " gettext to make PO message catalogs"
39 | @echo " changes to make an overview of all changed/added/deprecated items"
40 | @echo " xml to make Docutils-native XML files"
41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
42 | @echo " linkcheck to check all external links for integrity"
43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
44 | @echo " coverage to run coverage check of the documentation (if enabled)"
45 | @echo " dummy to check syntax errors of document sources"
46 |
47 | .PHONY: clean
48 | clean:
49 | rm -rf $(BUILDDIR)/*
50 |
51 | .PHONY: html
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | .PHONY: dirhtml
58 | dirhtml:
59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | @echo
61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 |
63 | .PHONY: singlehtml
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | .PHONY: pickle
70 | pickle:
71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | @echo
73 | @echo "Build finished; now you can process the pickle files."
74 |
75 | .PHONY: json
76 | json:
77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | @echo
79 | @echo "Build finished; now you can process the JSON files."
80 |
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | .PHONY: qthelp
89 | qthelp:
90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | @echo
92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pydata-google-auth.qhcp"
95 | @echo "To view the help file:"
96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pydata-google-auth.qhc"
97 |
98 | .PHONY: applehelp
99 | applehelp:
100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | @echo
102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | @echo "N.B. You won't be able to view it unless you put it in" \
104 | "~/Library/Documentation/Help or install it in your application" \
105 | "bundle."
106 |
107 | .PHONY: devhelp
108 | devhelp:
109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
110 | @echo
111 | @echo "Build finished."
112 | @echo "To view the help file:"
113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pydata-google-auth"
114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pydata-google-auth"
115 | @echo "# devhelp"
116 |
117 | .PHONY: epub
118 | epub:
119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
120 | @echo
121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
122 |
123 | .PHONY: epub3
124 | epub3:
125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
126 | @echo
127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
128 |
129 | .PHONY: latex
130 | latex:
131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
132 | @echo
133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
135 | "(use \`make latexpdf' here to do that automatically)."
136 |
137 | .PHONY: latexpdf
138 | latexpdf:
139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
140 | @echo "Running LaTeX files through pdflatex..."
141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
143 |
144 | .PHONY: latexpdfja
145 | latexpdfja:
146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
147 | @echo "Running LaTeX files through platex and dvipdfmx..."
148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
150 |
151 | .PHONY: text
152 | text:
153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
154 | @echo
155 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
156 |
157 | .PHONY: man
158 | man:
159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
160 | @echo
161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
162 |
163 | .PHONY: texinfo
164 | texinfo:
165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
166 | @echo
167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
168 | @echo "Run \`make' in that directory to run these through makeinfo" \
169 | "(use \`make info' here to do that automatically)."
170 |
171 | .PHONY: info
172 | info:
173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
174 | @echo "Running Texinfo files through makeinfo..."
175 | make -C $(BUILDDIR)/texinfo info
176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
177 |
178 | .PHONY: gettext
179 | gettext:
180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
181 | @echo
182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
183 |
184 | .PHONY: changes
185 | changes:
186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
187 | @echo
188 | @echo "The overview file is in $(BUILDDIR)/changes."
189 |
190 | .PHONY: linkcheck
191 | linkcheck:
192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
193 | @echo
194 | @echo "Link check complete; look for any errors in the above output " \
195 | "or in $(BUILDDIR)/linkcheck/output.txt."
196 |
197 | .PHONY: doctest
198 | doctest:
199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | @echo "Testing of doctests in the sources finished, look at the " \
201 | "results in $(BUILDDIR)/doctest/output.txt."
202 |
203 | .PHONY: coverage
204 | coverage:
205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | @echo "Testing of coverage in the sources finished, look at the " \
207 | "results in $(BUILDDIR)/coverage/python.txt."
208 |
209 | .PHONY: xml
210 | xml:
211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | @echo
213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 |
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | @echo
219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 |
221 | .PHONY: dummy
222 | dummy:
223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | @echo
225 | @echo "Build finished. Dummy builder generates no files."
226 |
--------------------------------------------------------------------------------
/docs/source/_static/js/authcodescripts.js:
--------------------------------------------------------------------------------
1 | function onloadoauthcode() {
2 | const PARAMS = new Proxy(new URLSearchParams(window.location.search), {
3 | get: (searchParams, prop) => searchParams.get(prop),
4 | });
5 | const AUTH_CODE = PARAMS.code;
6 |
7 | document.querySelector('.auth-code').textContent = AUTH_CODE;
8 |
9 | setupCopyButton(document.querySelector('.copy'), AUTH_CODE);
10 | }
11 |
12 | function setupCopyButton(button, text) {
13 | button.addEventListener('click', () => {
14 | navigator.clipboard.writeText(text);
15 | button.textContent = "Verification Code Copied";
16 | setTimeout(() => {
17 | // Remove the aria-live label so that when the
18 | // button text changes back to "Copy", it is
19 | // not read out.
20 | button.removeAttribute("aria-live");
21 | button.textContent = "Copy";
22 | }, 1000);
23 |
24 | // Re-Add the aria-live attribute to enable speech for
25 | // when button text changes next time.
26 | setTimeout(() => {
27 | button.setAttribute("aria-live", "assertive");
28 | }, 2000);
29 | });
30 | }
--------------------------------------------------------------------------------
/docs/source/_static/style.css:
--------------------------------------------------------------------------------
1 | @import url("theme.css");
2 |
3 | a.internal em {font-style: normal}
4 |
--------------------------------------------------------------------------------
/docs/source/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 | {% set css_files = css_files + ["_static/style.css"] %}
3 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | .. currentmodule:: pydata_google_auth
2 | .. _api:
3 |
4 | *************
5 | API Reference
6 | *************
7 |
8 | .. autosummary::
9 |
10 |    default
11 |    get_user_credentials
12 |    load_user_credentials
13 |    save_user_credentials
14 |    load_service_account_credentials
15 |    cache.CredentialsCache
16 |    cache.READ_WRITE
17 |    cache.REAUTH
18 |    cache.NOOP
19 |    exceptions.PyDataCredentialsError
20 |
21 | .. automodule:: pydata_google_auth
22 |    :members:
23 |    :show-inheritance:
24 |
25 | .. automodule:: pydata_google_auth.cache
26 |    :members:
27 |    :show-inheritance:
28 |
29 | .. automodule:: pydata_google_auth.exceptions
30 |    :members:
31 |    :show-inheritance:
32 |
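A hedged usage sketch of the ``cache`` constants listed above. The keyword argument
name ``credentials_cache`` is an assumption made for illustration; check the generated
documentation for ``get_user_credentials`` for the exact parameter name:

.. code-block:: python

    import pydata_google_auth
    import pydata_google_auth.cache

    # Sketch: force a fresh OAuth flow instead of reusing cached credentials.
    # ``credentials_cache`` is an assumed parameter name for illustration.
    credentials = pydata_google_auth.get_user_credentials(
        ["https://www.googleapis.com/auth/cloud-platform"],
        credentials_cache=pydata_google_auth.cache.REAUTH,
    )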
--------------------------------------------------------------------------------
/docs/source/changelog.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | .. _changelog-1.9.0:
5 |
6 | 1.9.0 / (2024-11-21)
7 | --------------------
8 |
9 | **Breaking Change:** Drop support for Python <= 3.8 (:issue:`77`)
10 |
11 | - Try Colab credentials in ``get_user_credentials``, avoiding a warning on Colab Enterprise (:issue:`76`)
12 | - Clean up remaining uses of six (:issue:`73`)
13 |
14 |
15 | .. _changelog-1.8.2:
16 |
17 | 1.8.2 / (2023-08-01)
18 | --------------------
19 |
20 | - Ensure that the user credentials flow always gets a refresh token.
21 | (:issue:`72`)
22 |
23 | .. _changelog-1.8.1:
24 |
25 | 1.8.1 / (2023-07-10)
26 | --------------------
27 |
28 | - If any exception occurs during Google Colab authentication, fallback to
29 | the Google Application Default Credentials flow. (:issue:`71`)
30 |
31 | .. _changelog-1.8.0:
32 |
33 | 1.8.0 / (2023-05-09)
34 | --------------------
35 |
36 | - When running on Google Colab, try Colab-based authentication
37 | (``google.colab.auth.authenticate_user()``) before attempting the Google
38 | Application Default Credentials flow. (:issue:`68`)
39 |
40 | .. _changelog-1.7.0:
41 |
42 | 1.7.0 / (2023-02-07)
43 | --------------------
44 |
45 | - Reissue of the library with the changes from 1.6.0 but with a new
46 | version number due to a conflict in releases.
47 |
48 | .. _changelog-1.6.0:
49 |
50 | 1.6.0 / (2023-02-07)
51 | --------------------
52 |
53 | - Adds decision logic to handle use cases where a user may not have the
54 | ability to log in via an Out of Band authentication flow. (:issue:`54`)
55 |
56 | - Also provides an OAuth page as part of the documentation.
57 |
58 | .. _changelog-1.5.0:
59 |
60 | 1.5.0 / (2023-01-09)
61 | --------------------
62 |
63 | - Adds ability to provide redirect uri. (:issue:`58`)
64 |
65 | .. _changelog-1.4.0:
66 |
67 | 1.4.0 / (2022-03-14)
68 | --------------------
69 |
70 | - Default ``use_local_webserver`` to ``True``. Google has deprecated the
71 | ``use_local_webserver = False`` `"out of band" (copy-paste) flow
72 | `_.
73 | The ``use_local_webserver = False`` option is planned to stop working in
74 | October 2022.
75 |
76 | .. _changelog-1.3.0:
77 |
78 | 1.3.0 / (2021-12-03)
79 | --------------------
80 |
81 | - Adds support for Python 3.10. (:issue:`51`)
82 | - Fixes typo in documentation. (:issue:`44`)
83 |
84 | .. _changelog-1.2.0:
85 |
86 | 1.2.0 / (2021-04-21)
87 | --------------------
88 |
89 | - Adds :func:`pydata_google_auth.load_service_account_credentials` function to
90 | get service account credentials from the specified JSON path. (:issue:`39`)
91 |
92 | .. _changelog-1.1.0:
93 |
94 | 1.1.0 / (2020-04-23)
95 | --------------------
96 |
97 | - Try a range of ports between 8080 and 8090 when ``use_local_webserver`` is
98 | ``True``. (:issue:`35`)
99 |
100 | .. _changelog-1.0.0:
101 |
102 | 1.0.0 / (2020-04-20)
103 | --------------------
104 |
105 | - Mark package as 1.0, generally available.
106 | - Update introduction with link to instructions on creating a Google Cloud
107 | project. (:issue:`18`)
108 |
109 | .. _changelog-0.3.0:
110 |
111 | 0.3.0 / (2020-02-04)
112 | --------------------
113 |
114 | - Add ``python -m pydata_google_auth`` CLI for working with user credentials.
115 | (:issue:`28`)
116 |
117 | .. _changelog-0.2.1:
118 |
119 | 0.2.1 / (2019-12-12)
120 | --------------------
121 |
122 | - Re-enable ``auth_local_webserver`` in the ``default`` method. Show a warning
123 |   rather than falling back to the console.
124 |
125 | .. _changelog-0.2.0:
126 |
127 | 0.2.0 / (2019-12-12)
128 | --------------------
129 |
130 | - **Deprecate** ``auth_local_webserver`` argument in favor of
131 | ``use_local_webserver`` argument (:issue:`20`).
132 |
133 | New Features
134 | ^^^^^^^^^^^^^
135 |
136 | - Adds :func:`pydata_google_auth.save_user_credentials` function to get user
137 | credentials and then save them to a specified JSON path. (:issue:`22`)
138 |
139 | Bug Fixes
140 | ^^^^^^^^^
141 |
142 | - Update OAuth2 token endpoint to latest URI from Google. (:issue:`27`)
143 | - Don't raise error when the ``APPDATA`` environment variable isn't set on
144 | Windows. (:issue:`29`)
145 |
146 | .. _changelog-0.1.3:
147 |
148 | 0.1.3 / (2019-02-26)
149 | --------------------
150 |
151 | Bug Fixes
152 | ^^^^^^^^^
153 |
154 | - Respect the ``dirname`` and ``filename`` arguments to the
155 | :class:`~pydata_google_auth.cache.ReadWriteCredentialsCache` and
156 | :class:`~pydata_google_auth.cache.WriteOnlyCredentialsCache` constructors.
157 | (:issue:`16`, :issue:`17`)
158 |
159 | .. _changelog-0.1.2:
160 |
161 | 0.1.2 / (2019-02-01)
162 | --------------------
163 |
164 | Bug Fixes
165 | ^^^^^^^^^
166 |
167 | - Don't write to the filesystem at module import time. This fixes an issue
168 | where the module could not be imported on systems where the file system is
169 | unwriteable. (:issue:`10`, :issue:`11`)
170 |
171 | .. _changelog-0.1.1:
172 |
173 | 0.1.1 / (2018-10-26)
174 | --------------------
175 |
176 | - Add LICENSE.txt to package manifest.
177 | - Document privacy policy.
178 |
179 | .. _changelog-0.1.0:
180 |
181 | 0.1.0 / (2018-10-23)
182 | --------------------
183 |
184 | - Add ``cache`` module for configuring caching behaviors. (:issue:`1`)
185 | - Fork the `pandas-gbq project `_ and
186 | refactor out helpers for working with Google credentials.
187 |
--------------------------------------------------------------------------------
/docs/source/cli.rst:
--------------------------------------------------------------------------------
1 | Command-line Reference
2 | ======================
3 |
4 | Run the ``pydata_google_auth`` CLI with ``python -m pydata_google_auth``.
5 |
6 | .. code:: bash
7 |
8 | usage: python -m pydata_google_auth [-h] {login,print-token} ...
9 |
10 | Manage credentials for Google APIs.
11 |
12 | optional arguments:
13 | -h, --help show this help message and exit
14 |
15 | commands:
16 | {login,print-token}
17 | login Login to Google and save user credentials as a JSON
18 | file to use as Application Default Credentials.
19 | print-token Load a credentials JSON file and print an access token.
20 |
21 |
22 | Saving user credentials with ``login``
23 | --------------------------------------
24 |
25 | .. code:: bash
26 |
27 | usage: python -m pydata_google_auth login [-h] [--scopes SCOPES]
28 | [--client_id CLIENT_ID]
29 | [--client_secret CLIENT_SECRET]
30 | [--use_local_webserver]
31 | destination
32 |
33 | positional arguments:
34 | destination Path of where to save user credentials JSON file.
35 |
36 | optional arguments:
37 | -h, --help show this help message and exit
38 | --scopes SCOPES Comma-separated list of scopes (permissions) to
39 | request from Google. See: https://developers.google.co
40 | m/identity/protocols/googlescopes for a list of
41 | available scopes. Default:
42 | https://www.googleapis.com/auth/cloud-platform
43 | --client_id CLIENT_ID
44 | (Optional, but recommended) Client ID. Use this in
45 | combination with the --client-secret argument to
46 | authenticate with an application other than the
47 | default (PyData Auth). This argument is required to
48 | use APIs that track billing and quotas via the
49 | application (such as Cloud Vision), rather than
50 | billing the user (such as BigQuery does).
51 | --client_secret CLIENT_SECRET
52 | (Optional, but recommended) Client secret. Use this in
53 | combination with the --client-id argument to
54 | authenticate with an application other than the
55 | default (PyData Auth). This argument is required to
56 | use APIs that track billing and quotas via the
57 | application (such as Cloud Vision), rather than
58 | billing the user (such as BigQuery does).
59 | --use_local_webserver
60 | Use a local webserver for the user authentication.
61 | This starts a webserver on localhost, which allows the
62 | browser to pass a token directly to the program.
63 |
64 | Save credentials with Cloud Platform scope to ``~/keys/google-credentials.json``.
65 |
66 | .. code:: bash
67 |
68 | python -m pydata_google_auth login ~/keys/google-credentials.json
69 |
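The programmatic equivalent of the ``login`` command is
``pydata_google_auth.save_user_credentials``, which the changelog describes as getting
user credentials and then saving them to a specified JSON path. A minimal sketch,
assuming the scopes list and the destination path are the first two arguments:

.. code-block:: python

    import pydata_google_auth

    # Sketch: run the user OAuth flow and save the resulting credentials as
    # JSON. The argument order is an assumption; see the API reference.
    pydata_google_auth.save_user_credentials(
        ["https://www.googleapis.com/auth/cloud-platform"],
        "/path/to/google-credentials.json",
    )
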
70 | Loading user credentials with ``print-token``
71 | ---------------------------------------------
72 |
73 | Print an access token associated with the credentials at
74 | ``~/keys/google-credentials.json``.
75 |
76 | .. code:: bash
77 |
78 | python -m pydata_google_auth print-token ~/keys/google-credentials.json
79 |
80 | Use ``curl`` and the ``credentials.json`` user credentials file to download
81 | the contents of ``gs://your-bucket/path/to/object.txt`` with the Google Cloud
82 | Storage JSON REST API.
83 |
84 | .. code:: bash
85 |
86 | curl -X GET \
87 | -H "Authorization: Bearer $(python -m pydata_google_auth print-token credentials.json)" \
88 | "https://storage.googleapis.com/storage/v1/b/your-bucket/o/path%%2Fto%%2Fobject.txt?alt=media"
89 |
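The saved file can also be reused from Python via
``pydata_google_auth.load_user_credentials``, which is listed in the API reference.
A minimal sketch, assuming it takes the path of the saved JSON file (as the
``print-token`` description suggests):

.. code-block:: python

    import pydata_google_auth

    # Sketch: load previously saved user credentials from the JSON file
    # written by ``python -m pydata_google_auth login``.
    credentials = pydata_google_auth.load_user_credentials("credentials.json")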
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # pydata-google-auth documentation build configuration file, created by
4 | # sphinx-quickstart on Wed Feb 8 10:52:12 2017.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another directory,
16 | # add these directories to sys.path here. If the directory is relative to the
17 | # documentation root, use os.path.abspath to make it absolute, like shown here.
18 | #
19 | import os
20 | import sys
21 |
22 | # sys.path.insert(0, os.path.abspath('.'))
23 |
24 | # -- General configuration ------------------------------------------------
25 |
26 | # If your documentation needs a minimal Sphinx version, state it here.
27 | #
28 | # needs_sphinx = '1.0'
29 |
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = [
34 | "sphinx.ext.autodoc",
35 | "sphinx.ext.autosummary",
36 | "sphinx.ext.doctest",
37 | "sphinx.ext.extlinks",
38 | "sphinx.ext.todo",
39 | "sphinx.ext.napoleon",
40 | "sphinx.ext.intersphinx",
41 | "sphinx.ext.coverage",
42 | "sphinx.ext.ifconfig",
43 | ]
44 |
45 | # Add any paths that contain templates here, relative to this directory.
46 | templates_path = ["_templates"]
47 |
48 | # The suffix(es) of source filenames.
49 | # You can specify multiple suffix as a list of string:
50 | #
51 | # source_suffix = ['.rst', '.md']
52 | source_suffix = ".rst"
53 |
54 | # The encoding of source files.
55 | #
56 | # source_encoding = 'utf-8-sig'
57 |
58 | # The master toctree document.
59 | master_doc = "index"
60 |
61 | # General information about the project.
62 | project = "pydata-google-auth"
63 | copyright = "2017, PyData Development Team"
64 | author = "PyData Development Team"
65 |
66 | # The version info for the project you're documenting, acts as replacement for
67 | # |version| and |release|, also used in various other places throughout the
68 | # built documents.
69 | #
70 | # The short X.Y version.
71 | version = "0.1.0"
72 | # The full version, including alpha/beta/rc tags.
73 | release = "0.1.0"
74 |
75 | # The language for content autogenerated by Sphinx. Refer to documentation
76 | # for a list of supported languages.
77 | #
78 | # This is also used if you do content translation via gettext catalogs.
79 | # Usually you set "language" from the command line for these cases.
80 | language = "en"
81 |
82 | # There are two options for replacing |today|: either, you set today to some
83 | # non-false value, then it is used:
84 | #
85 | # today = ''
86 | #
87 | # Else, today_fmt is used as the format for a strftime call.
88 | #
89 | # today_fmt = '%B %d, %Y'
90 |
91 | # List of patterns, relative to source directory, that match files and
92 | # directories to ignore when looking for source files.
93 | # This patterns also effect to html_static_path and html_extra_path
94 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
95 |
96 | # The reST default role (used for this markup: `text`) to use for all
97 | # documents.
98 | #
99 | # default_role = None
100 |
101 | # If true, '()' will be appended to :func: etc. cross-reference text.
102 | #
103 | # add_function_parentheses = True
104 |
105 | # If true, the current module name will be prepended to all description
106 | # unit titles (such as .. function::).
107 | #
108 | # add_module_names = True
109 |
110 | # If true, sectionauthor and moduleauthor directives will be shown in the
111 | # output. They are ignored by default.
112 | #
113 | # show_authors = False
114 |
115 | # The name of the Pygments (syntax highlighting) style to use.
116 | pygments_style = "sphinx"
117 |
118 | # A list of ignored prefixes for module index sorting.
119 | # modindex_common_prefix = []
120 |
121 | # If true, keep warnings as "system message" paragraphs in the built documents.
122 | # keep_warnings = False
123 |
124 | # If true, `todo` and `todoList` produce output, else they produce nothing.
125 | todo_include_todos = False
126 |
127 |
128 | # -- Options for HTML output ----------------------------------------------
129 |
130 | # The theme to use for HTML and HTML Help pages. See the documentation for
131 | # a list of builtin themes.
132 | html_theme = "alabaster"
133 |
134 | # Theme options are theme-specific and customize the look and feel of a theme
135 | # further. For a list of options available for each theme, see the
136 | # documentation.
137 | #
138 | # html_theme_options = {}
139 |
140 | # Add any paths that contain custom themes here, relative to this directory.
141 | # html_theme_path = []
142 |
143 | # The name for this set of Sphinx documents.
144 | # " v documentation" by default.
145 | #
146 | # html_title = u'pydata-google-auth v0.1.0'
147 |
148 | # A shorter title for the navigation bar. Default is the same as html_title.
149 | #
150 | # html_short_title = None
151 |
152 | # The name of an image file (relative to this directory) to place at the top
153 | # of the sidebar.
154 | #
155 | # html_logo = None
156 |
157 | # The name of an image file (relative to this directory) to use as a favicon of
158 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
159 | # pixels large.
160 | #
161 | # html_favicon = None
162 |
163 | # Add any paths that contain custom static files (such as style sheets) here,
164 | # relative to this directory. They are copied after the builtin static files,
165 | # so a file named "default.css" will overwrite the builtin "default.css".
166 | html_static_path = ["_static"]
167 |
168 | # Add any extra paths that contain custom files (such as robots.txt or
169 | # .htaccess) here, relative to this directory. These files are copied
170 | # directly to the root of the documentation.
171 | #
172 | # html_extra_path = []
173 |
174 | html_js_files = [
175 | "js/authcodescripts.js",
176 | ]
177 |
178 | # If not None, a 'Last updated on:' timestamp is inserted at every page
179 | # bottom, using the given strftime format.
180 | # The empty string is equivalent to '%b %d, %Y'.
181 | #
182 | # html_last_updated_fmt = None
183 |
184 | # If true, SmartyPants will be used to convert quotes and dashes to
185 | # typographically correct entities.
186 | #
187 | # html_use_smartypants = True
188 |
189 | # Custom sidebar templates, maps document names to template names.
190 | #
191 | # html_sidebars = {}
192 |
193 | # Additional templates that should be rendered to pages, maps page names to
194 | # template names.
195 | #
196 | # html_additional_pages = {}
197 |
198 | # If false, no module index is generated.
199 | #
200 | # html_domain_indices = True
201 |
202 | # If false, no index is generated.
203 | #
204 | # html_use_index = True
205 |
206 | # If true, the index is split into individual pages for each letter.
207 | #
208 | # html_split_index = False
209 |
210 | # If true, links to the reST sources are added to the pages.
211 | #
212 | # html_show_sourcelink = True
213 |
214 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
215 | #
216 | # html_show_sphinx = True
217 |
218 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
219 | #
220 | # html_show_copyright = True
221 |
222 | # If true, an OpenSearch description file will be output, and all pages will
223 | # contain a <link> tag referring to it. The value of this option must be the
224 | # base URL from which the finished HTML is served.
225 | #
226 | # html_use_opensearch = ''
227 |
228 | # This is the file name suffix for HTML files (e.g. ".xhtml").
229 | # html_file_suffix = None
230 |
231 | # Language to be used for generating the HTML full-text search index.
232 | # Sphinx supports the following languages:
233 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
234 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
235 | #
236 | # html_search_language = 'en'
237 |
238 | # A dictionary with options for the search language support, empty by default.
239 | # 'ja' uses this config value.
240 | # 'zh' user can custom change `jieba` dictionary path.
241 | #
242 | # html_search_options = {'type': 'default'}
243 |
244 | # The name of a javascript file (relative to the configuration directory) that
245 | # implements a search results scorer. If empty, the default will be used.
246 | #
247 | # html_search_scorer = 'scorer.js'
248 |
249 | # Output file base name for HTML help builder.
250 | htmlhelp_basename = "pydata-google-authdoc"
251 |
252 | # -- Options for LaTeX output ---------------------------------------------
253 |
254 | latex_elements = {
255 | # The paper size ('letterpaper' or 'a4paper').
256 | #
257 | # 'papersize': 'letterpaper',
258 | # The font size ('10pt', '11pt' or '12pt').
259 | #
260 | # 'pointsize': '10pt',
261 | # Additional stuff for the LaTeX preamble.
262 | #
263 | # 'preamble': '',
264 | # Latex figure (float) alignment
265 | #
266 | # 'figure_align': 'htbp',
267 | }
268 |
269 | # Grouping the document tree into LaTeX files. List of tuples
270 | # (source start file, target name, title,
271 | # author, documentclass [howto, manual, or own class]).
272 | latex_documents = [
273 | (
274 | master_doc,
275 | "pydata-google-auth.tex",
276 | "pydata-google-auth Documentation",
277 | "PyData Development Team",
278 | "manual",
279 | )
280 | ]
281 |
282 | # The name of an image file (relative to this directory) to place at the top of
283 | # the title page.
284 | #
285 | # latex_logo = None
286 |
287 | # For "manual" documents, if this is true, then toplevel headings are parts,
288 | # not chapters.
289 | #
290 | # latex_use_parts = False
291 |
292 | # If true, show page references after internal links.
293 | #
294 | # latex_show_pagerefs = False
295 |
296 | # If true, show URL addresses after external links.
297 | #
298 | # latex_show_urls = False
299 |
300 | # Documents to append as an appendix to all manuals.
301 | #
302 | # latex_appendices = []
303 |
304 | # If false, will not define \strong, \code, \titleref, \crossref ... but only
305 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
306 | # packages.
307 | #
308 | # latex_keep_old_macro_names = True
309 |
310 | # If false, no module index is generated.
311 | #
312 | # latex_domain_indices = True
313 |
314 |
315 | # -- Options for manual page output ---------------------------------------
316 |
317 | # One entry per manual page. List of tuples
318 | # (source start file, name, description, authors, manual section).
319 | man_pages = [
320 | (master_doc, "pydata-google-auth", "pydata-google-auth Documentation", [author], 1)
321 | ]
322 |
323 | # If true, show URL addresses after external links.
324 | #
325 | # man_show_urls = False
326 |
327 |
328 | # -- Options for napoleon -------------------------------------------
329 |
330 | napoleon_google_docstring = False
331 | napoleon_numpy_docstring = True
332 |
333 |
334 | # -- Options for Texinfo output -------------------------------------------
335 |
336 | # Grouping the document tree into Texinfo files. List of tuples
337 | # (source start file, target name, title, author,
338 | # dir menu entry, description, category)
339 | texinfo_documents = [
340 | (
341 | master_doc,
342 | "pydata-google-auth",
343 | "pydata-google-auth Documentation",
344 | author,
345 | "pydata-google-auth",
346 | "One line description of project.",
347 | "Miscellaneous",
348 | )
349 | ]
350 |
351 | # Documents to append as an appendix to all manuals.
352 | #
353 | # texinfo_appendices = []
354 |
355 | # If false, no module index is generated.
356 | #
357 | # texinfo_domain_indices = True
358 |
359 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
360 | #
361 | # texinfo_show_urls = 'footnote'
362 |
363 | # If true, do not generate a @detailmenu in the "Top" node's menu.
364 | #
365 | # texinfo_no_detailmenu = False
366 |
367 | # Configuration for intersphinx:
368 | intersphinx_mapping = {
369 | "https://docs.python.org/": None,
370 | "https://google-auth.readthedocs.io/en/latest/": None,
371 | "https://google-auth-oauthlib.readthedocs.io/en/latest/": None,
372 | }
373 |
374 | extlinks = {
375 | "issue": ("https://github.com/pydata/pydata-google-auth/issues/%s", "GH#"),
376 | "pr": ("https://github.com/pydata/pydata-google-auth/pull/%s", "GH#"),
377 | }
378 |
--------------------------------------------------------------------------------
/docs/source/contributing.rst:
--------------------------------------------------------------------------------
1 | .. _contributing:
2 |
3 | **********************************
4 | Contributing to pydata-google-auth
5 | **********************************
6 |
7 | .. contents:: Table of contents:
8 | :local:
9 |
10 | Where to start?
11 | ===============
12 |
13 | All contributions, bug reports, bug fixes, documentation improvements,
14 | enhancements and ideas are welcome.
15 |
16 | If you are simply looking to start working with the *pydata-google-auth* codebase, navigate to the
17 | `GitHub "issues" tab `_ and start looking through
18 | interesting issues.
19 |
20 | Or maybe through using *pydata-google-auth* you have an idea of your own or are looking for something
21 | in the documentation and thinking 'this can be improved'...you can do something about it!
22 |
23 | Feel free to ask questions on the `mailing list
24 | `_.
25 |
26 | Bug reports and enhancement requests
27 | ====================================
28 |
29 | Bug reports are an important part of making *pydata-google-auth* more stable. Having a complete bug report
30 | will allow others to reproduce the bug and provide insight into fixing it. Because many versions of
31 | *pydata-google-auth* are supported, knowing version information will also identify improvements made since
32 | previous versions. Trying the bug-producing code out on the *master* branch is often a worthwhile exercise
33 | to confirm the bug still exists. It is also worth searching existing bug reports and pull requests
34 | to see if the issue has already been reported and/or fixed.
35 |
36 | Bug reports must:
37 |
38 | #. Include a short, self-contained Python snippet reproducing the problem.
39 | You can format the code nicely by using `GitHub Flavored Markdown
40 | `__
41 |
42 | .. code-block:: python
43 |
44 | >>> import pydata_google_auth
45 | >>> creds, proj = pydata_google_auth.default(...)
46 | ...
47 |
48 | #. Include the full version string of *pydata-google-auth*.
49 |
50 | .. code-block:: python
51 |
52 | >>> import pydata_google_auth
53 | >>> pydata_google_auth.__version__
54 | ...
55 |
56 | #. Explain why the current behavior is wrong/not desired and what you expect instead.
57 |
58 | The issue will then show up to the *pydata-google-auth* community and be open to comments/ideas from others.
59 |
60 | Working with the code
61 | =====================
62 |
63 | Now that you have an issue you want to fix, enhancement to add, or documentation to improve,
64 | you need to learn how to work with GitHub and the *pydata-google-auth* code base.
65 |
66 | Version control, Git, and GitHub
67 | --------------------------------
68 |
69 | To the new user, working with Git is one of the more daunting aspects of contributing to *pydata-google-auth*.
70 | It can very quickly become overwhelming, but sticking to the guidelines below will help keep the process
71 | straightforward and mostly trouble free. As always, if you are having difficulties please
72 | feel free to ask for help.
73 |
74 | The code is hosted on `GitHub `_. To
75 | contribute you will need to sign up for a `free GitHub account
76 | `_. We use `Git `_ for
77 | version control to allow many people to work together on the project.
78 |
79 | Some great resources for learning Git:
80 |
81 | * the `GitHub help pages `_.
82 | * the `NumPy documentation `_.
83 | * Matthew Brett's `Pydagogue `_.
84 |
85 | Getting started with Git
86 | ------------------------
87 |
88 | `GitHub has instructions `__ for installing git,
89 | setting up your SSH key, and configuring git. All these steps need to be completed before
90 | you can work seamlessly between your local repository and GitHub.
91 |
92 | .. _contributing.forking:
93 |
94 | Forking
95 | -------
96 |
97 | You will need your own fork to work on the code. Go to the `pydata-google-auth project
98 | page `_ and hit the ``Fork`` button. You will
99 | want to clone your fork to your machine::
100 |
101 | git clone git@github.com:your-user-name/pydata-google-auth.git pydata-google-auth-yourname
102 | cd pydata-google-auth-yourname
103 | git remote add upstream git://github.com/pydata/pydata-google-auth.git
104 |
105 | This creates the directory `pydata-google-auth-yourname` and connects your repository to
106 | the upstream (main project) *pydata-google-auth* repository.
107 |
108 | The testing suite will run automatically on CircleCI once your pull request is submitted.
109 | However, if you wish to run the test suite on a branch prior to submitting the pull request,
110 | then CircleCI needs to be hooked up to your GitHub repository. Instructions for doing so
111 | are `here `__.
112 |
113 | Creating a branch
114 | -----------------
115 |
116 | You want your master branch to reflect only production-ready code, so create a
117 | feature branch for making your changes. For example::
118 |
119 | git branch shiny-new-feature
120 | git checkout shiny-new-feature
121 |
122 | The above can be simplified to::
123 |
124 | git checkout -b shiny-new-feature
125 |
126 | This changes your working directory to the shiny-new-feature branch. Keep any
127 | changes in this branch specific to one bug or feature so it is clear
128 | what the branch brings to *pydata-google-auth*. You can have many shiny-new-features
129 | and switch in between them using the git checkout command.
130 |
131 | To update this branch, you need to retrieve the changes from the master branch::
132 |
133 | git fetch upstream
134 | git rebase upstream/master
135 |
136 | This will replay your commits on top of the latest pydata-google-auth git master. If this
137 | leads to merge conflicts, you must resolve these before submitting your pull
138 | request. If you have uncommitted changes, you will need to ``stash`` them prior
139 | to updating. This will effectively store your changes and they can be reapplied
140 | after updating.
141 |
142 | Install in Development Mode
143 | ---------------------------
144 |
145 | It's helpful to install pydata-google-auth in development mode so that you can
146 | use the library without reinstalling the package after every change.
147 |
148 | Conda
149 | ~~~~~
150 |
151 | Create a new conda environment and install the necessary dependencies
152 |
153 | .. code-block:: shell
154 |
155 | $ conda create -n my-env --channel conda-forge \
156 | google-auth-oauthlib \
157 | google-api-python-client \
158 | google-auth-httplib2
159 | $ source activate my-env
160 |
161 | Install pydata-google-auth in development mode
162 |
163 | .. code-block:: shell
164 |
165 | $ python setup.py develop
166 |
167 | Pip & virtualenv
168 | ~~~~~~~~~~~~~~~~
169 |
170 | *Skip this section if you already followed the conda instructions.*
171 |
172 | Create a new `virtual
173 | environment `__.
174 |
175 | .. code-block:: shell
176 |
177 | $ virtualenv env
178 | $ source env/bin/activate
179 |
180 | You can install pydata-google-auth and its dependencies in `development mode via
181 | pip `__.
182 |
183 | .. code-block:: shell
184 |
185 | $ pip install -e .
186 |
187 | Contributing to the code base
188 | =============================
189 |
190 | .. contents:: Code Base:
191 | :local:
192 |
193 | Code standards
194 | --------------
195 |
196 | Writing good code is not just about what you write. It is also about *how* you
197 | write it. During testing on CircleCI, several tools will be run to check your
198 | code for stylistic errors. Generating any warnings will cause the test to fail.
199 | Thus, good style is a requirement for submitting code to *pydata-google-auth*.
200 |
201 | In addition, because a lot of people use our library, it is important that we
202 | do not make sudden changes to the code that could break a lot of user code;
203 | that is, we need the library to be as *backwards compatible* as possible to
204 | avoid mass breakages.
205 |
206 | Python (PEP8)
207 | ~~~~~~~~~~~~~
208 |
209 | *pydata-google-auth* uses the `PEP8 `_ standard.
210 | There are several tools to ensure you abide by this standard. Here are *some* of
211 | the more common ``PEP8`` issues:
212 |
213 | - we restrict line-length to 79 characters to promote readability
214 | - passing arguments should have spaces after commas, e.g. ``foo(arg1, arg2, kw1='bar')``
215 |
216 | CircleCI will run the `flake8 `_ tool and
217 | the `'black' code formatting tool `_ to report
218 | any stylistic errors in your code. Therefore, it is helpful before submitting
219 | code to run the check yourself on the diff::
220 |
221 | black .
222 | git diff master | flake8 --diff
223 |
224 | Backwards Compatibility
225 | ~~~~~~~~~~~~~~~~~~~~~~~
226 |
227 | Please try to maintain backward compatibility. If you think breakage is required,
228 | clearly state why as part of the pull request. Also, be careful when changing method
229 | signatures and add deprecation warnings where needed.
230 |
231 | Test-driven development/code writing
232 | ------------------------------------
233 |
234 | *pydata-google-auth* is serious about testing and strongly encourages contributors to embrace
235 | `test-driven development (TDD) `_.
236 | This development process "relies on the repetition of a very short development cycle:
237 | first the developer writes an (initially failing) automated test case that defines a desired
238 | improvement or new function, then produces the minimum amount of code to pass that test."
239 | So, before actually writing any code, you should write your tests. Often the test can be
240 | taken from the original GitHub issue. However, it is always worth considering additional
241 | use cases and writing corresponding tests.
242 |
243 | Adding tests is one of the most common requests after code is pushed to *pydata-google-auth*. Therefore,
244 | it is worth getting in the habit of writing tests ahead of time so this is never an issue.
245 |
246 | Like many packages, *pydata-google-auth* uses `pytest `_.
247 |
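For example, a minimal unit test in the style of the existing ``tests/unit``
suite could exercise the port-finding helper in
``pydata_google_auth._webserver``:

.. code-block:: python

    from pydata_google_auth import _webserver


    def test_find_open_port_returns_none_for_empty_range():
        # An empty port range cannot contain an open port, so the helper
        # returns None instead of a port number.
        assert _webserver.find_open_port(start=8080, stop=8080) is None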
248 | Running the test suite
249 | ~~~~~~~~~~~~~~~~~~~~~~
250 |
251 | The tests can then be run directly inside your Git clone (without having to
252 | install *pydata-google-auth*) by typing::
253 |
254 | pytest tests/unit
255 |     pytest tests/system
256 |
257 | The test suite is exhaustive and takes around 20 minutes to run. Often it is
258 | worth running only the subset of tests around your changes before running the
259 | entire suite.
260 |
261 | The easiest way to do this is with::
262 |
263 | pytest tests/path/to/test.py -k regex_matching_test_name
264 |
265 | Or with one of the following constructs::
266 |
267 | pytest tests/[test-module].py
268 | pytest tests/[test-module].py::[TestClass]
269 | pytest tests/[test-module].py::[TestClass]::[test_method]
270 |
271 | For more, see the `pytest `_ documentation.
272 |
273 | Testing on multiple Python versions
274 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
275 |
276 | pydata-google-auth uses `nox `__ to automate testing in
277 | multiple Python environments. First, install nox.
278 |
279 | .. code-block:: shell
280 |
281 | $ pip install --upgrade nox
282 |
283 | To run tests in all versions of Python, run `nox` from the repository's root
284 | directory.
285 |
286 | Documenting your code
287 | ---------------------
288 |
289 | Changes should be reflected in the release notes located in ``docs/source/changelog.rst``.
290 | This file contains an ongoing change log. Add an entry to this file to document your fix,
291 | enhancement or (unavoidable) breaking change. Make sure to include the GitHub issue number
292 | when adding your entry (using `` :issue:`1234` `` where `1234` is the issue/pull request number).
293 |
294 | If your code is an enhancement, it is most likely necessary to add usage
295 | examples to the existing documentation. Further, to let users know when
296 | this feature was added, the ``versionadded`` directive is used. The sphinx
297 | syntax for that is:
298 |
299 | .. code-block:: rst
300 |
301 | .. versionadded:: 0.1.3
302 |
303 | This will put the text *New in version 0.1.3* wherever you put the sphinx
304 | directive. This should also be put in the docstring when adding a new function
305 | or method.
306 |
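For example, a docstring for a hypothetical new function could include the
directive like this:

.. code-block:: python

    def my_new_helper(scopes):
        """Do something useful with the requested scopes.

        .. versionadded:: 0.1.3
        """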
307 | Contributing your changes to *pydata-google-auth*
308 | =================================================
309 |
310 | Committing your code
311 | --------------------
312 |
313 | Keep style fixes to a separate commit to make your pull request more readable.
314 |
315 | Once you've made changes, you can see them by typing::
316 |
317 | git status
318 |
319 | If you have created a new file, it is not being tracked by git. Add it by typing::
320 |
321 | git add path/to/file-to-be-added.py
322 |
323 | Running ``git status`` again should give something like::
324 |
325 | # On branch shiny-new-feature
326 | #
327 | # new file: path/to/file-to-be-added.py
328 | #
329 |
330 | Finally, commit your changes to your local repository with an explanatory message. *pydata-google-auth*
331 | uses a convention for commit message prefixes and layout. Here are
332 | some common prefixes along with general guidelines for when to use them:
333 |
334 | * ENH: Enhancement, new functionality
335 | * BUG: Bug fix
336 | * DOC: Additions/updates to documentation
337 | * TST: Additions/updates to tests
338 | * BLD: Updates to the build process/scripts
339 | * PERF: Performance improvement
340 | * CLN: Code cleanup
341 |
342 | The following defines how a commit message should be structured. Please reference the
343 | relevant GitHub issues in your commit message using GH1234 or #1234. Either style
344 | is fine, but the former is generally preferred:
345 |
346 | * a subject line with `< 80` chars.
347 | * One blank line.
348 | * Optionally, a commit message body.
349 |
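For example, a commit message following these guidelines might look like::

    ENH: Add shiny new feature (GH1234)

    A one- or two-sentence description of what the change does and why,
    wrapped to fit within the line-length guidelines.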
350 | Now you can commit your changes in your local repository::
351 |
352 |     git commit -m "ENH: brief description of your changes"
353 |
354 | Combining commits
355 | -----------------
356 |
357 | If you have multiple commits, you may want to combine them into one commit, often
358 | referred to as "squashing" or "rebasing". Package maintainers commonly request this
359 | when you submit a pull request, as it keeps the commit history compact. To rebase
360 | your commits::
361 |
362 | git rebase -i HEAD~#
363 |
364 | Where # is the number of commits you want to combine. Then you can pick the relevant
365 | commit message and discard others.
366 |
367 | To squash to the master branch do::
368 |
369 | git rebase -i master
370 |
371 | Use the ``s`` option on a commit to ``squash`` it, meaning to keep its commit message,
372 | or ``f`` to ``fixup`` it, meaning to discard its commit message when combining.
373 |
374 | Then you will need to push the branch (see below) forcefully to replace the current
375 | commits with the new ones::
376 |
377 | git push origin shiny-new-feature -f
378 |
379 |
380 | Pushing your changes
381 | --------------------
382 |
383 | When you want your changes to appear publicly on your GitHub page, push your
384 | forked feature branch's commits::
385 |
386 | git push origin shiny-new-feature
387 |
388 | Here ``origin`` is the default name given to your remote repository on GitHub.
389 | You can see the remote repositories::
390 |
391 | git remote -v
392 |
393 | If you added the upstream repository as described above you will see something
394 | like::
395 |
396 | origin git@github.com:yourname/pydata-google-auth.git (fetch)
397 | origin git@github.com:yourname/pydata-google-auth.git (push)
398 | upstream git://github.com/pydata/pydata-google-auth.git (fetch)
399 | upstream git://github.com/pydata/pydata-google-auth.git (push)
400 |
401 | Now your code is on GitHub, but it is not yet a part of the *pydata-google-auth* project. For that to
402 | happen, a pull request needs to be submitted on GitHub.
403 |
404 | Review your code
405 | ----------------
406 |
407 | When you're ready to ask for a code review, file a pull request. Before you do, once
408 | again make sure that you have followed all the guidelines outlined in this document
409 | regarding code style, tests, performance tests, and documentation. You should also
410 | double check your branch changes against the branch it was based on:
411 |
412 | #. Navigate to your repository on GitHub -- https://github.com/your-user-name/pydata-google-auth
413 | #. Click on ``Branches``
414 | #. Click on the ``Compare`` button for your feature branch
415 | #. Select the ``base`` and ``compare`` branches, if necessary. This will be ``master`` and
416 | ``shiny-new-feature``, respectively.
417 |
418 | Finally, make the pull request
419 | ------------------------------
420 |
421 | If everything looks good, you are ready to make a pull request. A pull request is how
422 | code from a local repository becomes available to the GitHub community and can be looked
423 | at and eventually merged into the master version. This pull request and its associated
424 | changes will eventually be committed to the master branch and available in the next
425 | release. To submit a pull request:
426 |
427 | #. Navigate to your repository on GitHub
428 | #. Click on the ``Pull Request`` button
429 | #. You can then click on ``Commits`` and ``Files Changed`` to make sure everything looks
430 | okay one last time
431 | #. Write a description of your changes in the ``Preview Discussion`` tab
432 | #. Click ``Send Pull Request``.
433 |
434 | This request then goes to the repository maintainers, and they will review
435 | the code. If you need to make more changes, you can make them in
436 | your branch, push them to GitHub, and the pull request will be automatically
437 | updated. Pushing them to GitHub again is done by::
438 |
439 | git push -f origin shiny-new-feature
440 |
441 | This will automatically update your pull request with the latest code and restart the
442 | CircleCI tests.
443 |
444 | Delete your merged branch (optional)
445 | ------------------------------------
446 |
447 | Once your feature branch is accepted into upstream, you'll probably want to get rid of
448 | the branch. First, merge upstream master into your branch so git knows it is safe to
449 | delete your branch::
450 |
451 | git fetch upstream
452 | git checkout master
453 | git merge upstream/master
454 |
455 | Then you can just do::
456 |
457 | git branch -d shiny-new-feature
458 |
459 | Make sure you use a lower-case ``-d``, or else git won't warn you if your feature
460 | branch has not actually been merged.
461 |
462 | The branch will still exist on GitHub, so to delete it there do::
463 |
464 | git push origin --delete shiny-new-feature
465 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. pydata-google-auth documentation master file, created by
2 | sphinx-quickstart on Wed Feb 8 10:52:12 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to pydata-google-auth's documentation!
7 | ===============================================
8 |
9 | The :mod:`pydata_google_auth` module provides a wrapper to authenticate to
10 | Google APIs, such as Google BigQuery.
11 |
12 | Contents:
13 |
14 | .. toctree::
15 | :maxdepth: 2
16 |
17 | install.rst
18 | intro.rst
19 | cli.rst
20 | api.rst
21 | contributing.rst
22 | changelog.rst
23 | privacy.rst
24 |
25 |
26 | Indices and tables
27 | ==================
28 |
29 | * :ref:`genindex`
30 | * :ref:`modindex`
31 | * :ref:`search`
32 |
33 | .. Use the meta tags to verify the site for use in Google OAuth2 consent flow.
34 |
35 | .. meta::
36 | :google-site-verification: 9QSsa9ahOZHbdwZAwl7x-Daaj1W9AttkUOeDgzKtxBw
37 |
--------------------------------------------------------------------------------
/docs/source/install.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | You can install pydata-google-auth with ``conda``, ``pip``, or by installing from source.
5 |
6 | Conda
7 | -----
8 |
9 | .. code-block:: shell
10 |
11 | $ conda install pydata-google-auth --channel conda-forge
12 |
13 | This installs pydata-google-auth and all common dependencies, including ``google-auth``.
14 |
15 | Pip
16 | ---
17 |
18 | To install the latest version of pydata-google-auth, run:
19 |
20 | .. code-block:: shell
21 |
22 | $ pip install pydata-google-auth -U
23 |
24 | This installs pydata-google-auth and all common dependencies, including ``google-auth``.
25 |
26 |
27 | Install from Source
28 | -------------------
29 |
30 | .. code-block:: shell
31 |
32 | $ pip install git+https://github.com/pydata/pydata-google-auth.git
33 |
34 |
35 | Dependencies
36 | ------------
37 |
38 | This module requires the following additional dependencies:
39 |
40 | - `google-auth `__: authentication and authorization for Google's API
41 | - `google-auth-oauthlib `__: integration with `oauthlib `__ for end-user authentication
42 |
--------------------------------------------------------------------------------
/docs/source/intro.rst:
--------------------------------------------------------------------------------
1 | Introduction
2 | ============
3 |
4 | pydata-google-auth wraps the `google-auth
5 | `_ and `google-auth-oauthlib
6 | `_ libraries to make it easier
7 | to get and cache user credentials for accessing the Google APIs from
8 | locally-installed data tools and libraries.
9 |
10 | .. warning::
11 |
12 | To use this module, you will need a Google account and developer project.
13 | Follow the `Using the BigQuery sandbox
14 | `_ instructions to get
15 | started with big data on Google Cloud without a credit card.
16 |
17 | See the `Google Cloud Platform authentication guide
18 | `_ for best practices on
19 | authentication in production server contexts.
20 |
21 | User credentials
22 | ----------------
23 |
24 | Use the :func:`pydata_google_auth.get_user_credentials` function to get user
25 | credentials authenticated to Google APIs.
26 |
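For example, the following sketch requests credentials with the Cloud Platform
scope (substitute whichever scopes the APIs you call require):

.. code-block:: python

    import pydata_google_auth

    # Prompts you to authorize access the first time; cached credentials
    # are reused on subsequent calls.
    credentials = pydata_google_auth.get_user_credentials(
        ["https://www.googleapis.com/auth/cloud-platform"],
    )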
27 | By default, pydata-google-auth will listen for the credentials on a local
28 | webserver, which is used as the redirect page from Google's OAuth 2.0 flow.
29 | When you set ``use_local_webserver=False``, pydata-google-auth will request
30 | that you copy a token from the :ref:`oauth-sign-in` page.
31 |
32 | .. toctree::
33 | :maxdepth: 2
34 |
35 | oauth.rst
36 |
37 | Default credentials
38 | -------------------
39 |
40 | Data library and tool authors can use the :func:`pydata_google_auth.default`
41 | function to get `Application Default Credentials
42 | `_
43 | and fallback to user credentials when no valid Application Default
44 | Credentials are found.
45 |
46 | When wrapping the :func:`pydata_google_auth.default` method for use in your
47 | tool or library, please provide your own client ID and client secret. Enable
48 | the APIs your users will need in the project which owns the client ID and
49 | secrets. Note that some APIs, such as Cloud Vision, bill the *client*
50 | project. Verify that the API you are enabling bills the user's project not
51 | the client project.
52 |
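As a sketch, a tool or library author might wrap it like this (the client ID
and secret below are placeholders for your application's own values):

.. code-block:: python

    import pydata_google_auth

    credentials, project_id = pydata_google_auth.default(
        ["https://www.googleapis.com/auth/cloud-platform"],
        client_id="YOUR-CLIENT-ID.apps.googleusercontent.com",  # placeholder
        client_secret="YOUR-CLIENT-SECRET",  # placeholder
    )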
--------------------------------------------------------------------------------
/docs/source/oauth.rst:
--------------------------------------------------------------------------------
1 | .. image:: https://lh3.googleusercontent.com/n4u3LcbRm3yvTK-EzYqGGtqHBf83KnfY14-3z9mIPRCrIKv-K4ieqJVLYl-yVM7H5EM
2 | :alt: pydata logo
3 | :class: logo
4 |
5 |
6 | .. _oauth-sign-in:
7 |
8 | Sign in to Google Cloud Platform
9 | ================================
10 |
11 | You are seeing this page because you are attempting to access Google Cloud Platform
12 | resources via one of several possible methods, including:
13 |
14 | * the ``pydata-google-auth`` library
15 |
16 | OR a ``pandas`` library helper function such as:
17 |
18 | * ``pandas.DataFrame.to_gbq()``
19 | * ``pandas.read_gbq()``
20 |
21 | OR a Jupyter/IPython magics command such as:
22 |
23 | * ``%%bigquery`` / ``%%bqsql``
24 | * ``%%spanner_graph``
25 |
26 | from this or another machine. If this is not the case, close this tab.
27 |
28 | Enter the following verification code in the command-line interface (CLI) on the
29 | machine you want to log into. This is a credential **similar to your password**
30 | and should not be shared with others.
31 |
32 |
33 | .. raw:: html
34 |
35 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | .. hint::
46 |
47 | You can close this tab when you’re done.
48 |
--------------------------------------------------------------------------------
/docs/source/privacy.rst:
--------------------------------------------------------------------------------
1 | Privacy
2 | =======
3 |
4 | This package is a `PyData project `_ and is subject to
5 | the `NumFocus privacy policy `_. Your
6 | use of Google APIs with this module is subject to each API's respective
7 | `terms of service `_.
8 |
9 | Google account and user data
10 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
11 |
12 | Accessing user data
13 | ~~~~~~~~~~~~~~~~~~~
14 |
15 | The :mod:`pydata_google_auth` module accesses your Google user account, with
16 | the list of `scopes
17 | `_ that you
18 | specify. Depending on your specified list of scopes, the credentials returned
19 | by this library may provide access to other user data, such as your email
20 | address, Google Cloud Platform resources, Google Drive files, or Google
21 | Sheets.
22 |
23 | Storing user data
24 | ~~~~~~~~~~~~~~~~~
25 |
26 | By default, your credentials are stored by the
27 | :class:`pydata_google_auth.cache.READ_WRITE` cache object to a local file, such as
28 | ``~/.config/pydata/pydata_google_credentials.json``. All user data is stored on your local machine. **Use
29 | caution when using this library on a shared machine**.
30 |
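If you prefer not to persist credentials to disk at all, for example on a
shared machine, you can pass a non-persistent cache when requesting
credentials. A minimal sketch, assuming the ``NOOP`` cache object provided by
:mod:`pydata_google_auth.cache`:

.. code-block:: python

    import pydata_google_auth
    import pydata_google_auth.cache

    credentials = pydata_google_auth.get_user_credentials(
        ["https://www.googleapis.com/auth/cloud-platform"],
        # NOOP neither reads nor writes the credentials file.
        credentials_cache=pydata_google_auth.cache.NOOP,
    )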
31 | Sharing user data
32 | ~~~~~~~~~~~~~~~~~
33 |
34 | The pydata-google-auth library only communicates with Google APIs. No user
35 | data is shared with PyData, NumFocus, or any other servers.
36 |
37 | Policies for application authors
38 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
39 |
40 | Do not use the default client ID when using the pydata-google-auth library
41 | from an application, library, or tool. Per the `Google User Data Policy
42 | `_, your
43 | application must accurately represent itself when authenticating to Google
44 | API services.
45 |
--------------------------------------------------------------------------------
/noxfile.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2018 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # https://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # Generated by synthtool. DO NOT EDIT!
18 |
19 | from __future__ import absolute_import
20 | import os
21 | import pathlib
22 | import shutil
23 |
24 | import nox
25 |
26 |
27 | BLACK_VERSION = "black==22.12.0"
28 | BLACK_PATHS = ["docs", "pydata_google_auth", "tests", "noxfile.py", "setup.py"]
29 |
30 | SPHINX_VERSION = "sphinx==4.5.0"
31 |
32 | DEFAULT_PYTHON_VERSION = "3.10"
33 | SYSTEM_TEST_PYTHON_VERSIONS = ["3.9", "3.13"]
34 | UNIT_TEST_PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13"]
35 |
36 | CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
37 |
38 | # 'docfx' is excluded since it only needs to run in 'docs-presubmit'
39 | nox.options.sessions = [
40 | "unit",
41 | "system",
42 | "cover",
43 | "lint",
44 | "lint_setup_py",
45 | "blacken",
46 | "docs",
47 | ]
48 |
49 | # Error if a python version is missing
50 | nox.options.error_on_missing_interpreters = True
51 |
52 |
53 | @nox.session(python=DEFAULT_PYTHON_VERSION)
54 | def lint(session):
55 | """Run linters.
56 |
57 | Returns a failure if the linters find linting errors or sufficiently
58 | serious code quality issues.
59 | """
60 | session.install("flake8", BLACK_VERSION)
61 | session.run(
62 | "black",
63 | "--check",
64 | *BLACK_PATHS,
65 | )
66 | session.run("flake8", "--append-config", ".flake8", "pydata_google_auth", "tests")
67 |
68 |
69 | @nox.session(python=DEFAULT_PYTHON_VERSION)
70 | def blacken(session):
71 | """Run black. Format code to uniform standard."""
72 | session.install(BLACK_VERSION)
73 | session.run(
74 | "black",
75 | *BLACK_PATHS,
76 | )
77 |
78 |
79 | @nox.session(python=DEFAULT_PYTHON_VERSION)
80 | def lint_setup_py(session):
81 | """Verify that setup.py is valid (including RST check)."""
82 | session.install("docutils", "pygments")
83 | session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
84 |
85 |
86 | def default(session):
87 | # Install all test dependencies, then install this package in-place.
88 |
89 | constraints_path = str(
90 | CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
91 | )
92 | session.install(
93 | "mock",
94 | "pyfakefs",
95 | "pytest",
96 | "pytest-cov",
97 | "-c",
98 | constraints_path,
99 | )
100 |
101 | session.install("-e", ".", "-c", constraints_path)
102 |
103 | # Run py.test against the unit tests.
104 | session.run(
105 | "py.test",
106 | "--quiet",
107 | f"--junitxml=unit_{session.python}_sponge_log.xml",
108 | "--cov=pydata_google_auth",
109 | "--cov=tests/unit",
110 | "--cov-append",
111 | "--cov-config=.coveragerc",
112 | "--cov-report=",
113 | "--cov-fail-under=0",
114 | os.path.join("tests", "unit"),
115 | *session.posargs,
116 | )
117 |
118 |
119 | @nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
120 | def unit(session):
121 | """Run the unit test suite."""
122 | default(session)
123 |
124 |
125 | @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
126 | def system(session):
127 | """Run the system test suite."""
128 | constraints_path = str(
129 | CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
130 | )
131 | system_test_path = os.path.join("tests", "system.py")
132 | system_test_folder_path = os.path.join("tests", "system")
133 |
134 | # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
135 | if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
136 | session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
137 | # Install pyopenssl for mTLS testing.
138 | if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
139 | session.install("pyopenssl")
140 |
141 | system_test_exists = os.path.exists(system_test_path)
142 | system_test_folder_exists = os.path.exists(system_test_folder_path)
143 | # Sanity check: only run tests if found.
144 | if not system_test_exists and not system_test_folder_exists:
145 | session.skip("System tests were not found")
146 |
147 | # Use pre-release gRPC for system tests.
148 | session.install("--pre", "grpcio")
149 |
150 | # Install all test dependencies, then install this package into the
151 | # virtualenv's dist-packages.
152 | session.install(
153 | "mock",
154 | "pyfakefs",
155 | "pytest",
156 | "pytest-cov",
157 | "google-cloud-testutils",
158 | "-c",
159 | constraints_path,
160 | )
161 | session.install("-e", ".", "-c", constraints_path)
162 |
163 | # Run py.test against the system tests.
164 | session.run(
165 | "py.test",
166 | "--quiet",
167 | f"--junitxml=system_{session.python}_sponge_log.xml",
168 | "--cov=pydata_google_auth",
169 | "--cov=tests/system",
170 | "--cov-append",
171 | "--cov-config=.coveragerc",
172 | "--cov-report=",
173 | "--cov-fail-under=0",
174 | system_test_folder_path,
175 | *session.posargs,
176 | )
177 |
178 |
179 | @nox.session(python=DEFAULT_PYTHON_VERSION)
180 | def cover(session):
181 | """Run the final coverage report.
182 |
183 | This outputs the coverage report aggregating coverage from the unit
184 | test runs (not system test runs), and then erases coverage data.
185 | """
186 | session.install("coverage", "pytest-cov")
187 | session.run("coverage", "report", "--show-missing", "--fail-under=60")
188 |
189 | session.run("coverage", "erase")
190 |
191 |
192 | @nox.session(python=DEFAULT_PYTHON_VERSION)
193 | def docs(session):
194 | """Build the docs for this library."""
195 |
196 | session.install("-e", ".")
197 | session.install(
198 | # We need to pin to specific versions of the `sphinxcontrib-*` packages
199 | # which still support sphinx 4.x.
200 | # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
201 | # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
202 | "sphinxcontrib-applehelp==1.0.4",
203 | "sphinxcontrib-devhelp==1.0.2",
204 | "sphinxcontrib-htmlhelp==2.0.1",
205 | "sphinxcontrib-qthelp==1.0.3",
206 | "sphinxcontrib-serializinghtml==1.1.5",
207 | SPHINX_VERSION,
208 | "alabaster",
209 | "recommonmark",
210 | )
211 |
212 | shutil.rmtree(os.path.join("docs", "source", "_build"), ignore_errors=True)
213 | session.run(
214 | "sphinx-build",
215 | "-W", # warnings as errors
216 | "-T", # show full traceback on exception
217 | "-N", # no colors
218 | "-b",
219 | "html",
220 | "-d",
221 | os.path.join("docs", "source", "_build", "doctrees", ""),
222 | os.path.join("docs", "source", ""),
223 | os.path.join("docs", "source", "_build", "html", ""),
224 | )
225 |
--------------------------------------------------------------------------------
/pydata_google_auth/__init__.py:
--------------------------------------------------------------------------------
1 | from .auth import default
2 | from .auth import get_user_credentials
3 | from .auth import load_user_credentials
4 | from .auth import save_user_credentials
5 | from .auth import load_service_account_credentials
6 | from ._version import get_versions
7 |
8 | versions = get_versions()
9 | __version__ = versions.get("closest-tag", versions["version"])
10 | __git_revision__ = versions["full-revisionid"]
11 |
12 | """pydata-google-auth
13 |
14 | This package provides helpers for fetching Google API credentials.
15 | """
16 |
17 | __all__ = [
18 | "__version__",
19 | "__git_revision__",
20 | "default",
21 | "get_user_credentials",
22 | "load_user_credentials",
23 | "save_user_credentials",
24 | "load_service_account_credentials",
25 | ]
26 |
--------------------------------------------------------------------------------
/pydata_google_auth/__main__.py:
--------------------------------------------------------------------------------
1 | """Private module that implements a pydata-google-auth CLI tool."""
2 |
3 | import argparse
4 | import sys
5 |
6 | from . import auth
7 |
8 |
9 | LOGIN_HELP = (
10 | "Login to Google and save user credentials as a JSON file to use as "
11 | "Application Default Credentials."
12 | )
13 | LOGIN_SCOPES_DEFAULT = "https://www.googleapis.com/auth/cloud-platform"
14 | LOGIN_SCOPES_HELP = (
15 | "Comma-separated list of scopes (permissions) to request from Google. "
16 | "See: https://developers.google.com/identity/protocols/googlescopes for "
17 | "a list of available scopes. Default: {}"
18 | ).format(LOGIN_SCOPES_DEFAULT)
19 | LOGIN_CLIENT_ID_HELP_TEMPLATE = (
20 | "(Optional, but recommended) Client {}. Use this in combination with "
21 | "the {other} argument to authenticate with an application other than the "
22 |     "default (PyData Auth). This argument is required to use APIs that track "
23 | "billing and quotas via the application (such as Cloud Vision), rather "
24 | "than billing the user (such as BigQuery does)."
25 | )
26 | LOGIN_CLIENT_ID_HELP = LOGIN_CLIENT_ID_HELP_TEMPLATE.format(
27 | "ID", other="--client-secret"
28 | )
29 | LOGIN_CLIENT_SECRET_HELP = LOGIN_CLIENT_ID_HELP_TEMPLATE.format(
30 | "secret", other="--client-id"
31 | )
32 | LOGIN_USE_LOCAL_WEBSERVER_HELP = (
33 | "Use a local webserver for the user authentication. This starts "
34 | "a webserver on localhost with a port between 8080 and 8089, "
35 | "inclusive, which allows the browser to pass a token directly to the "
36 | "program."
37 | )
38 |
39 | PRINT_TOKEN_HELP = "Load a credentials JSON file and print an access token."
40 | PRINT_TOKEN_DESCRIPTION = r"""examples:
41 |
42 | Download the contents of gs://your-bucket/path/to/object.txt with the Google
43 | Cloud Storage JSON REST API.
44 |
45 | curl -X GET \
46 | -H "Authorization: Bearer $(python -m pydata_google_auth print-token credentials.json)" \
47 | "https://storage.googleapis.com/storage/v1/b/your-bucket/o/path%%2Fto%%2Fobject.txt?alt=media"
48 | """
49 |
50 |
51 | def login(args):
52 | scopes = args.scopes.split(",")
53 | auth.save_user_credentials(
54 | scopes,
55 | args.destination,
56 | client_id=args.client_id,
57 | client_secret=args.client_secret,
58 | use_local_webserver=not args.nouse_local_webserver,
59 | )
60 |
61 |
62 | def print_token(args):
63 | credentials = auth.load_user_credentials(args.credentials_path)
64 | print(credentials.token)
65 |
66 |
67 | parser = argparse.ArgumentParser(
68 | prog="python -m pydata_google_auth",
69 | description="Manage credentials for Google APIs.",
70 | )
71 | subparsers = parser.add_subparsers(title="commands", dest="command")
72 |
73 | login_parser = subparsers.add_parser("login", help=LOGIN_HELP)
74 | login_parser.add_argument(
75 | "destination", help="Path of where to save user credentials JSON file."
76 | )
77 | login_parser.add_argument(
78 | "--scopes", help=LOGIN_SCOPES_HELP, default=LOGIN_SCOPES_DEFAULT
79 | )
80 | login_parser.add_argument("--client_id", help=LOGIN_CLIENT_ID_HELP)
81 | login_parser.add_argument("--client_secret", help=LOGIN_CLIENT_SECRET_HELP)
82 | login_parser.add_argument(
83 | "--use_local_webserver",
84 | action="store_true",
85 | help="Ignored. Defaults to true. To disable, set --nouse_local_webserver option.",
86 | )
87 | login_parser.add_argument(
88 | "--nouse_local_webserver", action="store_true", help=LOGIN_USE_LOCAL_WEBSERVER_HELP
89 | )
90 |
91 | print_token_parser = subparsers.add_parser(
92 | "print-token",
93 | help=PRINT_TOKEN_HELP,
94 | description=PRINT_TOKEN_DESCRIPTION,
95 | formatter_class=argparse.RawDescriptionHelpFormatter,
96 | )
97 | print_token_parser.add_argument(
98 | "credentials_path", help="Path of credentials JSON file."
99 | )
100 |
101 | args = parser.parse_args()
102 | if args.command == "login":
103 | login(args)
104 | elif args.command == "print-token":
105 | print_token(args)
106 | else:
107 | print('Got unknown command "{}".'.format(args.command), file=sys.stderr)
108 | parser.print_help()
109 |
--------------------------------------------------------------------------------
/pydata_google_auth/_version.py:
--------------------------------------------------------------------------------
1 | # This file helps to compute a version number in source trees obtained from
2 | # git-archive tarball (such as those provided by githubs download-from-tag
3 | # feature). Distribution tarballs (built by setup.py sdist) and build
4 | # directories (produced by setup.py build) will contain a much shorter file
5 | # that just contains the computed version number.
6 |
7 | # This file is released into the public domain. Generated by
8 | # versioneer-0.18 (https://github.com/warner/python-versioneer)
9 |
10 | """Git implementation of _version.py."""
11 |
12 | import errno
13 | import os
14 | import re
15 | import subprocess
16 | import sys
17 |
18 |
19 | def get_keywords():
20 | """Get the keywords needed to look up the version information."""
21 | # these strings will be replaced by git during git-archive.
22 | # setup.py/versioneer.py will grep for the variable names, so they must
23 | # each be defined on a line of their own. _version.py will just call
24 | # get_keywords().
25 | git_refnames = " (HEAD -> main)"
26 | git_full = "ab923498a4f923c060c54e3b3913d002fc15bbfc"
27 | git_date = "2025-01-29 12:30:10 -0600"
28 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
29 | return keywords
30 |
31 |
32 | class VersioneerConfig:
33 | """Container for Versioneer configuration parameters."""
34 |
35 |
36 | def get_config():
37 | """Create, populate and return the VersioneerConfig() object."""
38 | # these strings are filled in when 'setup.py versioneer' creates
39 | # _version.py
40 | cfg = VersioneerConfig()
41 | cfg.VCS = "git"
42 | cfg.style = "pep440"
43 | cfg.tag_prefix = ""
44 | cfg.parentdir_prefix = "pydata_google_auth/_version.py"
45 | cfg.versionfile_source = "pydata_google_auth/_version.py"
46 | cfg.verbose = False
47 | return cfg
48 |
49 |
50 | class NotThisMethod(Exception):
51 | """Exception raised if a method is not valid for the current scenario."""
52 |
53 |
54 | LONG_VERSION_PY = {}
55 | HANDLERS = {}
56 |
57 |
58 | def register_vcs_handler(vcs, method): # decorator
59 | """Decorator to mark a method as the handler for a particular VCS."""
60 |
61 | def decorate(f):
62 | """Store f in HANDLERS[vcs][method]."""
63 | if vcs not in HANDLERS:
64 | HANDLERS[vcs] = {}
65 | HANDLERS[vcs][method] = f
66 | return f
67 |
68 | return decorate
69 |
70 |
71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
72 | """Call the given command(s)."""
73 | assert isinstance(commands, list)
74 | p = None
75 | for c in commands:
76 | try:
77 | dispcmd = str([c] + args)
78 | # remember shell=False, so use git.cmd on windows, not just git
79 | p = subprocess.Popen(
80 | [c] + args,
81 | cwd=cwd,
82 | env=env,
83 | stdout=subprocess.PIPE,
84 | stderr=(subprocess.PIPE if hide_stderr else None),
85 | )
86 | break
87 | except EnvironmentError:
88 | e = sys.exc_info()[1]
89 | if e.errno == errno.ENOENT:
90 | continue
91 | if verbose:
92 | print("unable to run %s" % dispcmd)
93 | print(e)
94 | return None, None
95 | else:
96 | if verbose:
97 | print("unable to find command, tried %s" % (commands,))
98 | return None, None
99 | stdout = p.communicate()[0].strip()
100 | if sys.version_info[0] >= 3:
101 | stdout = stdout.decode()
102 | if p.returncode != 0:
103 | if verbose:
104 | print("unable to run %s (error)" % dispcmd)
105 | print("stdout was %s" % stdout)
106 | return None, p.returncode
107 | return stdout, p.returncode
108 |
109 |
110 | def versions_from_parentdir(parentdir_prefix, root, verbose):
111 | """Try to determine the version from the parent directory name.
112 |
113 | Source tarballs conventionally unpack into a directory that includes both
114 | the project name and a version string. We will also support searching up
115 | two directory levels for an appropriately named parent directory
116 | """
117 | rootdirs = []
118 |
119 | for i in range(3):
120 | dirname = os.path.basename(root)
121 | if dirname.startswith(parentdir_prefix):
122 | return {
123 | "version": dirname[len(parentdir_prefix) :],
124 | "full-revisionid": None,
125 | "dirty": False,
126 | "error": None,
127 | "date": None,
128 | }
129 | else:
130 | rootdirs.append(root)
131 | root = os.path.dirname(root) # up a level
132 |
133 | if verbose:
134 | print(
135 | "Tried directories %s but none started with prefix %s"
136 | % (str(rootdirs), parentdir_prefix)
137 | )
138 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
139 |
140 |
141 | @register_vcs_handler("git", "get_keywords")
142 | def git_get_keywords(versionfile_abs):
143 | """Extract version information from the given file."""
144 | # the code embedded in _version.py can just fetch the value of these
145 | # keywords. When used from setup.py, we don't want to import _version.py,
146 | # so we do it with a regexp instead. This function is not used from
147 | # _version.py.
148 | keywords = {}
149 | try:
150 | f = open(versionfile_abs, "r")
151 | for line in f.readlines():
152 | if line.strip().startswith("git_refnames ="):
153 | mo = re.search(r'=\s*"(.*)"', line)
154 | if mo:
155 | keywords["refnames"] = mo.group(1)
156 | if line.strip().startswith("git_full ="):
157 | mo = re.search(r'=\s*"(.*)"', line)
158 | if mo:
159 | keywords["full"] = mo.group(1)
160 | if line.strip().startswith("git_date ="):
161 | mo = re.search(r'=\s*"(.*)"', line)
162 | if mo:
163 | keywords["date"] = mo.group(1)
164 | f.close()
165 | except EnvironmentError:
166 | pass
167 | return keywords
168 |
169 |
170 | @register_vcs_handler("git", "keywords")
171 | def git_versions_from_keywords(keywords, tag_prefix, verbose):
172 | """Get version information from git keywords."""
173 | if not keywords:
174 | raise NotThisMethod("no keywords at all, weird")
175 | date = keywords.get("date")
176 | if date is not None:
177 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
178 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
179 | # -like" string, which we must then edit to make compliant), because
180 | # it's been around since git-1.5.3, and it's too difficult to
181 | # discover which version we're using, or to work around using an
182 | # older one.
183 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
184 | refnames = keywords["refnames"].strip()
185 | if refnames.startswith("$Format"):
186 | if verbose:
187 | print("keywords are unexpanded, not using")
188 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
189 | refs = set([r.strip() for r in refnames.strip("()").split(",")])
190 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
191 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
192 | TAG = "tag: "
193 | tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
194 | if not tags:
195 | # Either we're using git < 1.8.3, or there really are no tags. We use
196 | # a heuristic: assume all version tags have a digit. The old git %d
197 | # expansion behaves like git log --decorate=short and strips out the
198 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
199 | # between branches and tags. By ignoring refnames without digits, we
200 | # filter out many common branch names like "release" and
201 | # "stabilization", as well as "HEAD" and "master".
202 | tags = set([r for r in refs if re.search(r"\d", r)])
203 | if verbose:
204 | print("discarding '%s', no digits" % ",".join(refs - tags))
205 | if verbose:
206 | print("likely tags: %s" % ",".join(sorted(tags)))
207 | for ref in sorted(tags):
208 | # sorting will prefer e.g. "2.0" over "2.0rc1"
209 | if ref.startswith(tag_prefix):
210 | r = ref[len(tag_prefix) :]
211 | if verbose:
212 | print("picking %s" % r)
213 | return {
214 | "version": r,
215 | "full-revisionid": keywords["full"].strip(),
216 | "dirty": False,
217 | "error": None,
218 | "date": date,
219 | }
220 | # no suitable tags, so version is "0+unknown", but full hex is still there
221 | if verbose:
222 | print("no suitable tags, using unknown + full revision id")
223 | return {
224 | "version": "0+unknown",
225 | "full-revisionid": keywords["full"].strip(),
226 | "dirty": False,
227 | "error": "no suitable tags",
228 | "date": None,
229 | }
230 |
231 |
232 | @register_vcs_handler("git", "pieces_from_vcs")
233 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
234 | """Get version from 'git describe' in the root of the source tree.
235 |
236 | This only gets called if the git-archive 'subst' keywords were *not*
237 | expanded, and _version.py hasn't already been rewritten with a short
238 | version string, meaning we're inside a checked out source tree.
239 | """
240 | GITS = ["git"]
241 | if sys.platform == "win32":
242 | GITS = ["git.cmd", "git.exe"]
243 |
244 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
245 | if rc != 0:
246 | if verbose:
247 | print("Directory %s not under git control" % root)
248 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
249 |
250 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
251 | # if there isn't one, this yields HEX[-dirty] (no NUM)
252 | describe_out, rc = run_command(
253 | GITS,
254 | [
255 | "describe",
256 | "--tags",
257 | "--dirty",
258 | "--always",
259 | "--long",
260 | "--match",
261 | "%s*" % tag_prefix,
262 | ],
263 | cwd=root,
264 | )
265 | # --long was added in git-1.5.5
266 | if describe_out is None:
267 | raise NotThisMethod("'git describe' failed")
268 | describe_out = describe_out.strip()
269 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
270 | if full_out is None:
271 | raise NotThisMethod("'git rev-parse' failed")
272 | full_out = full_out.strip()
273 |
274 | pieces = {}
275 | pieces["long"] = full_out
276 | pieces["short"] = full_out[:7] # maybe improved later
277 | pieces["error"] = None
278 |
279 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
280 | # TAG might have hyphens.
281 | git_describe = describe_out
282 |
283 | # look for -dirty suffix
284 | dirty = git_describe.endswith("-dirty")
285 | pieces["dirty"] = dirty
286 | if dirty:
287 | git_describe = git_describe[: git_describe.rindex("-dirty")]
288 |
289 | # now we have TAG-NUM-gHEX or HEX
290 |
291 | if "-" in git_describe:
292 | # TAG-NUM-gHEX
293 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
294 | if not mo:
295 | # unparseable. Maybe git-describe is misbehaving?
296 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
297 | return pieces
298 |
299 | # tag
300 | full_tag = mo.group(1)
301 | if not full_tag.startswith(tag_prefix):
302 | if verbose:
303 | fmt = "tag '%s' doesn't start with prefix '%s'"
304 | print(fmt % (full_tag, tag_prefix))
305 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
306 | full_tag,
307 | tag_prefix,
308 | )
309 | return pieces
310 | pieces["closest-tag"] = full_tag[len(tag_prefix) :]
311 |
312 | # distance: number of commits since tag
313 | pieces["distance"] = int(mo.group(2))
314 |
315 | # commit: short hex revision ID
316 | pieces["short"] = mo.group(3)
317 |
318 | else:
319 | # HEX: no tags
320 | pieces["closest-tag"] = None
321 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
322 | pieces["distance"] = int(count_out) # total number of commits
323 |
324 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
325 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
326 | 0
327 | ].strip()
328 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
329 |
330 | return pieces
331 |
332 |
333 | def plus_or_dot(pieces):
334 | """Return a + if we don't already have one, else return a ."""
335 | if "+" in pieces.get("closest-tag", ""):
336 | return "."
337 | return "+"
338 |
339 |
340 | def render_pep440(pieces):
341 | """Build up version string, with post-release "local version identifier".
342 |
343 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
344 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
345 |
346 | Exceptions:
347 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
348 | """
349 | if pieces["closest-tag"]:
350 | rendered = pieces["closest-tag"]
351 | if pieces["distance"] or pieces["dirty"]:
352 | rendered += plus_or_dot(pieces)
353 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
354 | if pieces["dirty"]:
355 | rendered += ".dirty"
356 | else:
357 | # exception #1
358 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
359 | if pieces["dirty"]:
360 | rendered += ".dirty"
361 | return rendered
362 |
363 |
364 | def render_pep440_pre(pieces):
365 | """TAG[.post.devDISTANCE] -- No -dirty.
366 |
367 | Exceptions:
368 | 1: no tags. 0.post.devDISTANCE
369 | """
370 | if pieces["closest-tag"]:
371 | rendered = pieces["closest-tag"]
372 | if pieces["distance"]:
373 | rendered += ".post.dev%d" % pieces["distance"]
374 | else:
375 | # exception #1
376 | rendered = "0.post.dev%d" % pieces["distance"]
377 | return rendered
378 |
379 |
380 | def render_pep440_post(pieces):
381 | """TAG[.postDISTANCE[.dev0]+gHEX] .
382 |
383 | The ".dev0" means dirty. Note that .dev0 sorts backwards
384 | (a dirty tree will appear "older" than the corresponding clean one),
385 | but you shouldn't be releasing software with -dirty anyways.
386 |
387 | Exceptions:
388 | 1: no tags. 0.postDISTANCE[.dev0]
389 | """
390 | if pieces["closest-tag"]:
391 | rendered = pieces["closest-tag"]
392 | if pieces["distance"] or pieces["dirty"]:
393 | rendered += ".post%d" % pieces["distance"]
394 | if pieces["dirty"]:
395 | rendered += ".dev0"
396 | rendered += plus_or_dot(pieces)
397 | rendered += "g%s" % pieces["short"]
398 | else:
399 | # exception #1
400 | rendered = "0.post%d" % pieces["distance"]
401 | if pieces["dirty"]:
402 | rendered += ".dev0"
403 | rendered += "+g%s" % pieces["short"]
404 | return rendered
405 |
406 |
407 | def render_pep440_old(pieces):
408 | """TAG[.postDISTANCE[.dev0]] .
409 |
410 | The ".dev0" means dirty.
411 |
412 |     Exceptions:
413 | 1: no tags. 0.postDISTANCE[.dev0]
414 | """
415 | if pieces["closest-tag"]:
416 | rendered = pieces["closest-tag"]
417 | if pieces["distance"] or pieces["dirty"]:
418 | rendered += ".post%d" % pieces["distance"]
419 | if pieces["dirty"]:
420 | rendered += ".dev0"
421 | else:
422 | # exception #1
423 | rendered = "0.post%d" % pieces["distance"]
424 | if pieces["dirty"]:
425 | rendered += ".dev0"
426 | return rendered
427 |
428 |
429 | def render_git_describe(pieces):
430 | """TAG[-DISTANCE-gHEX][-dirty].
431 |
432 | Like 'git describe --tags --dirty --always'.
433 |
434 | Exceptions:
435 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
436 | """
437 | if pieces["closest-tag"]:
438 | rendered = pieces["closest-tag"]
439 | if pieces["distance"]:
440 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
441 | else:
442 | # exception #1
443 | rendered = pieces["short"]
444 | if pieces["dirty"]:
445 | rendered += "-dirty"
446 | return rendered
447 |
448 |
449 | def render_git_describe_long(pieces):
450 | """TAG-DISTANCE-gHEX[-dirty].
451 |
452 |     Like 'git describe --tags --dirty --always --long'.
453 | The distance/hash is unconditional.
454 |
455 | Exceptions:
456 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
457 | """
458 | if pieces["closest-tag"]:
459 | rendered = pieces["closest-tag"]
460 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
461 | else:
462 | # exception #1
463 | rendered = pieces["short"]
464 | if pieces["dirty"]:
465 | rendered += "-dirty"
466 | return rendered
467 |
468 |
469 | def render(pieces, style):
470 | """Render the given version pieces into the requested style."""
471 | if pieces["error"]:
472 | return {
473 | "version": "unknown",
474 | "full-revisionid": pieces.get("long"),
475 | "dirty": None,
476 | "error": pieces["error"],
477 | "date": None,
478 | }
479 |
480 | if not style or style == "default":
481 | style = "pep440" # the default
482 |
483 | if style == "pep440":
484 | rendered = render_pep440(pieces)
485 | elif style == "pep440-pre":
486 | rendered = render_pep440_pre(pieces)
487 | elif style == "pep440-post":
488 | rendered = render_pep440_post(pieces)
489 | elif style == "pep440-old":
490 | rendered = render_pep440_old(pieces)
491 | elif style == "git-describe":
492 | rendered = render_git_describe(pieces)
493 | elif style == "git-describe-long":
494 | rendered = render_git_describe_long(pieces)
495 | else:
496 | raise ValueError("unknown style '%s'" % style)
497 |
498 | return {
499 | "version": rendered,
500 | "full-revisionid": pieces["long"],
501 | "dirty": pieces["dirty"],
502 | "error": None,
503 | "date": pieces.get("date"),
504 | }
505 |
506 |
507 | def get_versions():
508 | """Get version information or return default if unable to do so."""
509 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
510 | # __file__, we can work backwards from there to the root. Some
511 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
512 | # case we can only use expanded keywords.
513 |
514 | cfg = get_config()
515 | verbose = cfg.verbose
516 |
517 | try:
518 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
519 | except NotThisMethod:
520 | pass
521 |
522 | try:
523 | root = os.path.realpath(__file__)
524 | # versionfile_source is the relative path from the top of the source
525 | # tree (where the .git directory might live) to this file. Invert
526 | # this to find the root from __file__.
527 | for i in cfg.versionfile_source.split("/"):
528 | root = os.path.dirname(root)
529 | except NameError:
530 | return {
531 | "version": "0+unknown",
532 | "full-revisionid": None,
533 | "dirty": None,
534 | "error": "unable to find root of source tree",
535 | "date": None,
536 | }
537 |
538 | try:
539 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
540 | return render(pieces, cfg.style)
541 | except NotThisMethod:
542 | pass
543 |
544 | try:
545 | if cfg.parentdir_prefix:
546 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
547 | except NotThisMethod:
548 | pass
549 |
550 | return {
551 | "version": "0+unknown",
552 | "full-revisionid": None,
553 | "dirty": None,
554 | "error": "unable to compute version",
555 | "date": None,
556 | }
557 |
--------------------------------------------------------------------------------
/pydata_google_auth/_webserver.py:
--------------------------------------------------------------------------------
1 | """Helpers for running a local webserver to receive authorization code."""
2 |
3 | import socket
4 | from contextlib import closing
5 |
6 | from pydata_google_auth import exceptions
7 |
8 |
9 | LOCALHOST = "localhost"
10 | DEFAULT_PORTS_TO_TRY = 100
11 |
12 |
13 | def is_port_open(port):
14 | """Check if a port is open on localhost.
15 |
16 | Based on StackOverflow answer: https://stackoverflow.com/a/43238489/101923
17 |
18 | Parameters
19 | ----------
20 | port : int
21 | A port to check on localhost.
22 |
23 | Returns
24 | -------
25 | is_open : bool
26 | True if a socket can be opened at the requested port.
27 | """
28 | with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
29 | try:
30 | sock.bind((LOCALHOST, port))
31 | sock.listen(1)
32 | except socket.error:
33 | is_open = False
34 | else:
35 | is_open = True
36 | return is_open
37 |
38 |
39 | def find_open_port(start=8080, stop=None):
40 | """Find an open port between ``start`` and ``stop``.
41 |
42 | Parameters
43 | ----------
44 | start : Optional[int]
45 | Beginning of range of ports to try. Defaults to 8080.
46 | stop : Optional[int]
47 | End of range of ports to try (not including exactly equals ``stop``).
48 |         End of range of ports to try (exclusive; the ``stop`` port itself is not tried).
49 |
50 | Returns
51 | -------
52 | Optional[int]
53 | ``None`` if no open port is found, otherwise an integer indicating an
54 | open port.
55 | """
56 | if not stop:
57 | stop = start + DEFAULT_PORTS_TO_TRY
58 |
59 | for port in range(start, stop):
60 | if is_port_open(port):
61 | return port
62 |
63 | # No open ports found.
64 | return None
65 |
66 |
67 | def run_local_server(app_flow, **kwargs):
68 | """Run local webserver installed app flow on some open port.
69 |
70 | Parameters
71 | ----------
72 | app_flow : google_auth_oauthlib.flow.InstalledAppFlow
73 | Installed application flow to fetch user credentials.
74 |
75 | Returns
76 | -------
77 | google.auth.credentials.Credentials
78 | User credentials from installed application flow.
79 |
80 | Raises
81 | ------
82 | pydata_google_auth.exceptions.PyDataConnectionError
83 | If no open port can be found in the range from 8080 to 8089,
84 | inclusive.
85 | """
86 | port = find_open_port()
87 | if not port:
88 | raise exceptions.PyDataConnectionError("Could not find open port.")
89 | return app_flow.run_local_server(host=LOCALHOST, port=port, **kwargs)
90 |
--------------------------------------------------------------------------------
/pydata_google_auth/auth.py:
--------------------------------------------------------------------------------
1 | """Private module for fetching Google API credentials."""
2 |
3 | import logging
4 |
5 | import google.auth
6 | import google.auth.exceptions
7 | import google.oauth2.credentials
8 | from google_auth_oauthlib import flow
9 | import oauthlib.oauth2.rfc6749.errors
10 | import google.auth.transport.requests
11 |
12 | from pydata_google_auth import exceptions
13 | from pydata_google_auth import cache
14 | from pydata_google_auth import _webserver
15 |
16 |
17 | logger = logging.getLogger(__name__)
18 |
19 | DESKTOP_CLIENT_ID = (
20 | "262006177488-3425ks60hkk80fssi9vpohv88g6q1iqd.apps.googleusercontent.com"
21 | )
22 | DESKTOP_CLIENT_SECRET = "JSF-iczmzEgbTR-XK-2xaWAc"
23 |
24 | # webapp CID/CS to enable a redirect uri/client id/secret that is not OOB.
25 | WEBAPP_REDIRECT_URI = "https://pydata-google-auth.readthedocs.io/en/latest/oauth.html"
26 | WEBAPP_CLIENT_ID = (
27 | "262006177488-ka1m0ue4fptfmt9siejdd5lom7p39upa.apps.googleusercontent.com"
28 | )
29 | WEBAPP_CLIENT_SECRET = "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"
30 |
31 | GOOGLE_AUTH_URI = "https://accounts.google.com/o/oauth2/auth"
32 | GOOGLE_TOKEN_URI = "https://oauth2.googleapis.com/token"
33 |
34 | AUTH_URI_KWARGS = {
35 | # Ensure that we get a refresh token by telling Google we want to assume
36 | # this is first time we're authorizing this app. See:
37 | # https://github.com/googleapis/google-api-python-client/issues/213#issuecomment-205886341
38 | "prompt": "consent",
39 | }
40 |
41 |
42 | def _run_webapp(flow, redirect_uri=None, **kwargs):
43 | if redirect_uri:
44 | flow.redirect_uri = redirect_uri
45 | else:
46 | flow.redirect_uri = flow._OOB_REDIRECT_URI
47 |
48 | auth_url, _ = flow.authorization_url(**kwargs)
49 | authorization_prompt_message = (
50 | "Please visit this URL to authorize this application: {url}"
51 | )
52 |
53 | if authorization_prompt_message:
54 | print(authorization_prompt_message.format(url=auth_url))
55 |
56 | authorization_code_message = "Enter the authorization code: "
57 |
58 | code = input(authorization_code_message)
59 | flow.fetch_token(code=code)
60 | return flow.credentials
61 |
62 |
63 | def default(
64 | scopes,
65 | client_id=None,
66 | client_secret=None,
67 | credentials_cache=cache.READ_WRITE,
68 | use_local_webserver=True,
69 | auth_local_webserver=None,
70 | redirect_uri=None,
71 | ):
72 | """
73 | Get credentials and default project for accessing Google APIs.
74 |
75 | This method first attempts to get credentials via the
76 | :func:`google.auth.default` function. If it is unable to get valid
77 | credentials, it then attempts to get user account credentials via the
78 | :func:`pydata_google_auth.get_user_credentials` function.
79 |
80 | Parameters
81 | ----------
82 | scopes : list[str]
83 | A list of scopes to use when authenticating to Google APIs. See the
84 | `list of OAuth 2.0 scopes for Google APIs
85 | `_.
86 | client_id : str, optional
87 |         The client ID to use when prompting for user credentials.
88 | Defaults to a client ID associated with pydata-google-auth.
89 |
90 | If you are a tool or library author, you must override the default
91 | value with a client ID associated with your project. Per the `Google
92 | APIs terms of service `_, you
93 | must not mask your API client's identity when using Google APIs.
94 | client_secret : str, optional
95 | The client secrets to use when prompting for user credentials.
96 | Defaults to a client secret associated with pydata-google-auth.
97 |
98 | If you are a tool or library author, you must override the default
99 | value with a client secret associated with your project. Per the
100 | `Google APIs terms of service
101 | `_, you must not mask your API
102 | client's identity when using Google APIs.
103 | credentials_cache : pydata_google_auth.cache.CredentialsCache, optional
104 | An object responsible for loading and saving user credentials.
105 |
106 | By default, pydata-google-auth reads and writes credentials in
107 | ``$HOME/.config/pydata/pydata_google_credentials.json`` or
108 | ``$APPDATA/.config/pydata/pydata_google_credentials.json`` on
109 | Windows.
110 | use_local_webserver : bool, optional
111 | Use a local webserver for the user authentication
112 | :class:`google_auth_oauthlib.flow.InstalledAppFlow`. Binds a
113 | webserver to an open port on ``localhost`` between 8080 and 8089,
114 |         inclusive, to receive the authentication token. Defaults to ``True``.
115 |         If set to ``False``, a token is requested via the console instead.
116 | auth_local_webserver : deprecated
117 | Use the ``use_local_webserver`` parameter instead.
118 | redirect_uri : str, optional
119 | Redirect URIs are endpoints to which the OAuth 2.0 server can send
120 | responses. They may be used in situations such as
121 |
122 | * an organization has an org specific authentication endpoint
123 |         * an organization has an org-specific authentication endpoint
124 |         * an organization cannot use an endpoint directly because of
125 |           constraints on access to the internet (e.g. when running code on a
126 |
127 | Returns
128 | -------
129 | credentials, project_id : tuple[google.auth.credentials.Credentials, str or None]
130 | credentials : OAuth 2.0 credentials for accessing Google APIs
131 |
132 | project_id : A default Google developer project ID, if one could be determined
133 | from the credentials. For example, this returns the project ID
134 | associated with a service account when using a service account key
135 | file. It returns None when using user-based credentials.
136 |
137 | Raises
138 | ------
139 | pydata_google_auth.exceptions.PyDataCredentialsError
140 | If unable to get valid credentials.
141 | """
142 | if auth_local_webserver is not None:
143 | use_local_webserver = auth_local_webserver
144 |
145 | # Try to retrieve Application Default Credentials
146 | credentials, default_project = get_application_default_credentials(scopes)
147 |
148 | if credentials and credentials.valid:
149 | return credentials, default_project
150 |
151 | credentials = get_user_credentials(
152 | scopes,
153 | client_id=client_id,
154 | client_secret=client_secret,
155 | credentials_cache=credentials_cache,
156 | use_local_webserver=use_local_webserver,
157 | redirect_uri=redirect_uri,
158 | )
159 |
160 | if not credentials or not credentials.valid:
161 | raise exceptions.PyDataCredentialsError("Could not get any valid credentials.")
162 |
163 | return credentials, None
164 |
165 |
166 | def try_colab_auth_import():
167 | try:
168 | from google.colab import auth
169 |
170 | return auth
171 | except Exception:
172 | # We are catching a broad exception class here because we want to be
173 | # agnostic to anything that could internally go wrong in the google
174 | # colab auth. Some of the known exceptions we want to pass on are:
175 | #
176 | # ModuleNotFoundError: No module named 'google.colab'
177 | # ImportError: cannot import name 'auth' from 'google.cloud'
178 | return None
179 |
180 |
181 | def get_colab_default_credentials(scopes):
182 | """This is a special handling for google colab environment where we want to
183 | use the colab specific authentication flow.
184 |
185 | See:
186 | https://github.com/googlecolab/colabtools/blob/3c8772efd332289e1c6d1204826b0915d22b5b95/google/colab/auth.py#L209
187 | """
188 | auth = try_colab_auth_import()
189 | if auth is None:
190 | return None, None
191 |
192 | try:
193 | auth.authenticate_user()
194 |
195 | # authenticate_user() sets the default credentials, but we
196 | # still need to get the token from those default credentials.
197 | return get_application_default_credentials(scopes=scopes)
198 | except Exception:
199 | # We are catching a broad exception class here because we want to be
200 | # agnostic to anything that could internally go wrong in the google
201 | # colab auth. Some of the known exceptions we want to pass on are:
202 | #
203 | # MessageError: Error: credential propagation was unsuccessful
204 | #
205 | # The MessageError happens on Vertex Colab when it fails to resolve auth
206 | # from the Compute Engine Metadata server.
207 | return None, None
208 |
209 |
210 | def get_application_default_credentials(scopes):
211 | """
212 | This method tries to retrieve the "default application credentials".
213 | This could be useful for running code on Google Cloud Platform.
214 |
215 | Parameters
216 | ----------
217 | project_id (str, optional): Override the default project ID.
218 |
219 | Returns
220 | -------
221 | - GoogleCredentials,
222 | If the default application credentials can be retrieved
223 | from the environment. The retrieved credentials should also
224 | have access to the project (project_id) on BigQuery.
225 | - OR None,
226 | If default application credentials can not be retrieved
227 | from the environment. Or, the retrieved credentials do not
228 | have access to the project (project_id) on BigQuery.
229 | """
230 | try:
231 | credentials, project = google.auth.default(scopes=scopes)
232 | except (google.auth.exceptions.DefaultCredentialsError, IOError) as exc:
233 | logger.debug("Error getting default credentials: {}".format(str(exc)))
234 | return None, None
235 |
236 | if credentials and not credentials.valid:
237 | request = google.auth.transport.requests.Request()
238 | try:
239 | credentials.refresh(request)
240 | except google.auth.exceptions.RefreshError:
241 | # Sometimes (such as on Travis) google-auth returns GCE
242 | # credentials, but fetching the token for those credentials doesn't
243 | # actually work. See:
244 | # https://github.com/googleapis/google-auth-library-python/issues/287
245 | return None, None
246 |
247 | return credentials, project
248 |
249 |
250 | def get_user_credentials(
251 | scopes,
252 | client_id=None,
253 | client_secret=None,
254 | credentials_cache=cache.READ_WRITE,
255 | use_local_webserver=True,
256 | auth_local_webserver=None,
257 | redirect_uri=None,
258 | ):
259 | """
260 | Gets user account credentials.
261 |
262 | This function authenticates using user credentials, by trying to
263 |
264 | 1. Authenticate using ``google.colab.authenticate_user()``
265 | 2. Load saved credentials from the ``credentials_cache``
266 | 3. Go through the OAuth 2.0 flow (with provided ``client_id`` and
267 | ``client_secret``)
268 |
269 | The default read-write cache attempts to read credentials from a file on
270 | disk. If these credentials are not found or are invalid, it begins an
271 | OAuth 2.0 flow to get credentials. A browser window opens, asking
272 | you to authenticate to your Google account under the product name
273 | ``PyData Google Auth``. The permissions it requests correspond to the
274 | scopes you've provided.
275 |
276 | Additional information on the user credentials authentication mechanism
277 | can be found `here
278 | <https://pydata-google-auth.readthedocs.io/en/latest/oauth.html>`__.
279 |
280 | Parameters
281 | ----------
282 | scopes : list[str]
283 | A list of scopes to use when authenticating to Google APIs. See the
284 | `list of OAuth 2.0 scopes for Google APIs
285 | <https://developers.google.com/identity/protocols/googlescopes>`_.
286 | client_id : str, optional
287 | The client ID to use when prompting for user credentials.
288 | Defaults to a client ID associated with pydata-google-auth.
289 |
290 | If you are a tool or library author, you must override the default
291 | value with a client ID associated with your project. Per the `Google
292 | APIs terms of service <https://developers.google.com/terms/>`_, you
293 | must not mask your API client's identity when using Google APIs.
294 | client_secret : str, optional
295 | The client secret to use when prompting for user credentials.
296 | Defaults to a client secret associated with pydata-google-auth.
297 |
298 | If you are a tool or library author, you must override the default
299 | value with a client secret associated with your project. Per the
300 | `Google APIs terms of service
301 | <https://developers.google.com/terms/>`_, you must not mask your API
302 | client's identity when using Google APIs.
303 | credentials_cache : pydata_google_auth.cache.CredentialsCache, optional
304 | An object responsible for loading and saving user credentials.
305 |
306 | By default, pydata-google-auth reads and writes credentials in
307 | ``$HOME/.config/pydata/pydata_google_credentials.json`` or
308 | ``$APPDATA/pydata/pydata_google_credentials.json`` on
309 | Windows.
310 | use_local_webserver : bool, optional
311 | Use a local webserver for the user authentication
312 | :class:`google_auth_oauthlib.flow.InstalledAppFlow`. Binds a
313 | webserver to an open port on ``localhost`` between 8080 and 8089,
314 | inclusive, to receive the authentication token. Defaults to
315 | ``True``. If ``False``, a web application OAuth flow is used instead.
316 | auth_local_webserver : deprecated
317 | Use the ``use_local_webserver`` parameter instead.
318 | redirect_uri : str, optional
319 | Redirect URIs are endpoints to which the OAuth 2.0 server can send
320 | responses. They may be needed in situations where, for example,
321 |
322 | * an organization has an org-specific authentication endpoint, or
323 | * an organization cannot use an endpoint directly because of
324 | constraints on internet access (e.g. when running code on a
325 | remotely hosted device).
326 |
327 | Returns
328 | -------
329 | credentials : google.oauth2.credentials.Credentials
330 | Credentials for the user, with the requested scopes.
331 |
332 | Raises
333 | ------
334 | pydata_google_auth.exceptions.PyDataCredentialsError
335 | If unable to get valid user credentials.
336 | """
337 |
338 | # Try to authenticate the user with Colab-based credentials, if possible.
339 | # The default_project is ignored for Colab credentials. It's not usually
340 | # set, anyway.
341 | credentials, _ = get_colab_default_credentials(scopes)
342 |
343 | # Break early to avoid trying to fetch any other kinds of credentials.
344 | # Prefer Colab credentials over any credentials based on the default
345 | # client ID.
346 | if credentials:
347 | # Make sure to exit early since we don't want to try to save these
348 | # credentials to a cache file.
349 | return credentials
350 |
351 | if auth_local_webserver is not None:
352 | use_local_webserver = auth_local_webserver
353 |
354 | # Use None as default for client_id and client_secret so that the values
355 | # aren't included in the docs. A string of bytes isn't useful for the
356 | # documentation and might encourage the values to be used outside of this
357 | # library.
358 |
359 | if use_local_webserver:
360 | if client_id is None:
361 | client_id = DESKTOP_CLIENT_ID
362 | if client_secret is None:
363 | client_secret = DESKTOP_CLIENT_SECRET
364 |
365 | elif not use_local_webserver and not redirect_uri:
366 | if client_id is None:
367 | client_id = WEBAPP_CLIENT_ID
368 | if client_secret is None:
369 | client_secret = WEBAPP_CLIENT_SECRET
370 | redirect_uri = WEBAPP_REDIRECT_URI
371 |
372 | elif not use_local_webserver and redirect_uri:
373 | if (client_id is None) or (client_secret is None):
374 | raise exceptions.PyDataCredentialsError(
375 | """Unable to get valid credentials: please provide a
376 | valid client_id and/or client_secret."""
377 | )
378 |
379 | credentials = credentials_cache.load()
380 |
381 | client_config = {
382 | "installed": {
383 | "client_id": client_id,
384 | "client_secret": client_secret,
385 | "redirect_uris": [redirect_uri, "urn:ietf:wg:oauth:2.0:oob"],
386 | "auth_uri": GOOGLE_AUTH_URI,
387 | "token_uri": GOOGLE_TOKEN_URI,
388 | }
389 | }
390 |
391 | if credentials is None:
392 | app_flow = flow.InstalledAppFlow.from_client_config(
393 | client_config, scopes=scopes
394 | )
395 |
396 | try:
397 | if use_local_webserver:
398 | credentials = _webserver.run_local_server(app_flow, **AUTH_URI_KWARGS)
399 | else:
400 | credentials = _run_webapp(
401 | app_flow, redirect_uri=redirect_uri, **AUTH_URI_KWARGS
402 | )
403 |
404 | except oauthlib.oauth2.rfc6749.errors.OAuth2Error as exc:
405 | raise exceptions.PyDataCredentialsError(
406 | "Unable to get valid credentials: {}".format(exc)
407 | )
408 |
409 | credentials_cache.save(credentials)
410 |
411 | if credentials and not credentials.valid:
412 | request = google.auth.transport.requests.Request()
413 | credentials.refresh(request)
414 |
415 | return credentials
416 |
417 |
418 | def save_user_credentials(
419 | scopes, path, client_id=None, client_secret=None, use_local_webserver=True
420 | ):
421 | """
422 | Gets user account credentials and saves them to a JSON file at ``path``.
423 |
424 | This function authenticates using user credentials by going through the
425 | OAuth 2.0 flow.
426 |
427 | Parameters
428 | ----------
429 |
430 | scopes : list[str]
431 | A list of scopes to use when authenticating to Google APIs. See the
432 | `list of OAuth 2.0 scopes for Google APIs
433 | <https://developers.google.com/identity/protocols/googlescopes>`_.
434 | path : str
435 | Path to save credentials JSON file.
436 | client_id : str, optional
437 | The client ID to use when prompting for user credentials.
438 | Defaults to a client ID associated with pydata-google-auth.
439 |
440 | If you are a tool or library author, you must override the default
441 | value with a client ID associated with your project. Per the `Google
442 | APIs terms of service <https://developers.google.com/terms/>`_, you
443 | must not mask your API client's identity when using Google APIs.
444 | client_secret : str, optional
445 | The client secret to use when prompting for user credentials.
446 | Defaults to a client secret associated with pydata-google-auth.
447 |
448 | If you are a tool or library author, you must override the default
449 | value with a client secret associated with your project. Per the
450 | `Google APIs terms of service
451 | <https://developers.google.com/terms/>`_, you must not mask your API
452 | client's identity when using Google APIs.
453 | use_local_webserver : bool, optional
454 | Use a local webserver for the user authentication
455 | :class:`google_auth_oauthlib.flow.InstalledAppFlow`. Binds a
456 | webserver to an open port on ``localhost`` between 8080 and 8089,
457 | inclusive, to receive the authentication token. Defaults to
458 | ``True``. If ``False``, a web application OAuth flow is used instead.
459 |
460 | Returns
461 | -------
462 |
463 | None
464 |
465 | Raises
466 | ------
467 | pydata_google_auth.exceptions.PyDataCredentialsError
468 | If unable to get valid user credentials.
469 |
470 | Examples
471 | --------
472 |
473 | Get credentials for Google Cloud Platform and save them to
474 | ``/home/username/keys/google-credentials.json``.
475 |
476 | .. code-block:: python
477 |
478 | pydata_google_auth.save_user_credentials(
479 | ["https://www.googleapis.com/auth/cloud-platform"],
480 | "/home/username/keys/google-credentials.json",
481 | use_local_webserver=True,
482 | )
483 |
484 | Set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to use
485 | these credentials with Google Application Default Credentials.
486 |
487 | .. code-block:: bash
488 |
489 | export GOOGLE_APPLICATION_CREDENTIALS='/home/username/keys/google-credentials.json'
490 | """
491 | credentials = get_user_credentials(
492 | scopes,
493 | client_id=client_id,
494 | client_secret=client_secret,
495 | credentials_cache=cache.NOOP,
496 | use_local_webserver=use_local_webserver,
497 | )
498 | cache._save_user_account_credentials(credentials, path)
499 |
500 |
501 | def load_user_credentials(path):
502 | """
503 | Gets user account credentials from JSON file at ``path``.
504 |
505 | .. warning::
506 | Important: If you accept a credential configuration (credential JSON/File/Stream)
507 | from an external source for authentication to Google Cloud Platform, you must
508 | validate it before providing it to any Google API or client library. Providing an
509 | unvalidated credential configuration to Google APIs or libraries can compromise
510 | the security of your systems and data. For more information, refer to
511 | `Validate credential configurations from external sources`_.
512 |
513 | .. _Validate credential configurations from external sources:
514 | https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
515 |
516 | Parameters
517 | ----------
518 | path : str
519 | Path to credentials JSON file.
520 |
521 | Returns
522 | -------
523 |
524 | google.auth.credentials.Credentials
525 |
526 | Raises
527 | ------
528 | pydata_google_auth.exceptions.PyDataCredentialsError
529 | If unable to load user credentials.
530 |
531 | Examples
532 | --------
533 |
534 | Load credentials and use them to construct a BigQuery client.
535 |
536 | .. code-block:: python
537 |
538 | import pydata_google_auth
539 | import google.cloud.bigquery
540 |
541 | credentials = pydata_google_auth.load_user_credentials(
542 | "/home/username/keys/google-credentials.json",
543 | )
544 | client = google.cloud.bigquery.Client(
545 | credentials=credentials,
546 | project="my-project-id"
547 | )
548 | """
549 | credentials = cache._load_user_credentials_from_file(path)
550 | if not credentials:
551 | raise exceptions.PyDataCredentialsError("Could not load credentials.")
552 | return credentials
553 |
554 |
555 | def load_service_account_credentials(path, scopes=None):
556 | """
557 | Gets service account credentials from JSON file at ``path``.
558 |
559 | Parameters
560 | ----------
561 | path : str
562 | Path to credentials JSON file.
563 | scopes : list[str], optional
564 | A list of scopes to use when authenticating to Google APIs. See the
565 | `list of OAuth 2.0 scopes for Google APIs
566 | <https://developers.google.com/identity/protocols/googlescopes>`_.
567 |
568 | Returns
569 | -------
570 |
571 | google.oauth2.service_account.Credentials
572 |
573 | Raises
574 | ------
575 | pydata_google_auth.exceptions.PyDataCredentialsError
576 | If unable to load service credentials.
577 |
578 | Examples
579 | --------
580 |
581 | Load credentials and use them to construct a BigQuery client.
582 |
583 | .. code-block:: python
584 |
585 | import pydata_google_auth
586 | import google.cloud.bigquery
587 |
588 | credentials = pydata_google_auth.load_service_account_credentials(
589 | "/home/username/keys/google-service-account-credentials.json",
590 | )
591 | client = google.cloud.bigquery.Client(
592 | credentials=credentials,
593 | project=credentials.project_id
594 | )
595 | """
596 |
597 | credentials = cache._load_service_account_credentials_from_file(path, scopes=scopes)
598 | if not credentials:
599 | raise exceptions.PyDataCredentialsError("Could not load credentials.")
600 | return credentials
601 |
--------------------------------------------------------------------------------
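
A minimal usage sketch of the public helpers defined in ``auth.py`` above, assuming the Cloud Platform scope and a machine where a browser can be opened for the OAuth 2.0 redirect:

    import pydata_google_auth

    SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

    # default() prefers Application Default Credentials and falls back to
    # the cached or interactive user-credentials flow shown above.
    credentials, project_id = pydata_google_auth.default(SCOPES)

    # get_user_credentials() skips Application Default Credentials and goes
    # straight to the user flow, binding a local webserver for the redirect.
    user_credentials = pydata_google_auth.get_user_credentials(
        SCOPES,
        use_local_webserver=True,
    )
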
/pydata_google_auth/cache.py:
--------------------------------------------------------------------------------
1 | """Caching implementations for reading and writing user credentials."""
2 |
3 | import errno
4 | import json
5 | import logging
6 | import os
7 | import os.path
8 |
9 | import google.auth.exceptions
10 | import google.auth.transport.requests
11 | import google.oauth2.credentials
12 | from google.oauth2 import service_account
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | _DIRNAME = "pydata"
17 | _FILENAME = "pydata_google_credentials.json"
18 |
19 |
20 | def _get_default_credentials_path(credentials_dirname, credentials_filename):
21 | """
22 | Gets the default path to the Google user credentials
23 |
24 | Returns
25 | -------
26 | str
27 | Path to the Google user credentials
28 | """
29 | config_path = None
30 |
31 | if os.name == "nt":
32 | config_path = os.getenv("APPDATA")
33 | if not config_path:
34 | config_path = os.path.join(os.path.expanduser("~"), ".config")
35 |
36 | config_path = os.path.join(config_path, credentials_dirname)
37 | return os.path.join(config_path, credentials_filename)
38 |
39 |
40 | def _load_user_credentials_from_info(credentials_json):
41 | credentials = google.oauth2.credentials.Credentials(
42 | token=credentials_json.get("access_token"),
43 | refresh_token=credentials_json.get("refresh_token"),
44 | id_token=credentials_json.get("id_token"),
45 | token_uri=credentials_json.get("token_uri"),
46 | client_id=credentials_json.get("client_id"),
47 | client_secret=credentials_json.get("client_secret"),
48 | scopes=credentials_json.get("scopes"),
49 | )
50 |
51 | if credentials and not credentials.valid:
52 | request = google.auth.transport.requests.Request()
53 | try:
54 | credentials.refresh(request)
55 | except google.auth.exceptions.RefreshError:
56 | # Credentials could be expired or revoked. Try to reauthorize.
57 | return None
58 |
59 | return credentials
60 |
61 |
62 | def _load_user_credentials_from_file(credentials_path):
63 | """
64 | Loads user account credentials from a local file.
65 |
66 | Parameters
67 | ----------
68 | credentials_path : str
69 | Path to the user credentials JSON file.
70 |
71 | Returns
72 | -------
73 | google.oauth2.credentials.Credentials, optional
74 | The user credentials loaded from the file and refreshed if
75 | necessary. Returns ``None`` if the file cannot be read or
76 | parsed, or if the credentials cannot be refreshed (for
77 | example, because the refresh token has expired or been
78 | revoked).
79 | """
80 | try:
81 | with open(credentials_path) as credentials_file:
82 | credentials_json = json.load(credentials_file)
83 | except (IOError, ValueError) as exc:
84 | logger.debug(
85 | "Error loading credentials from {}: {}".format(credentials_path, str(exc))
86 | )
87 | return None
88 |
89 | return _load_user_credentials_from_info(credentials_json)
90 |
91 |
92 | def _save_user_account_credentials(credentials, credentials_path):
93 | """
94 | Saves user account credentials to a local file.
95 | """
96 |
97 | # Create the directory if it doesn't exist.
98 | # https://stackoverflow.com/a/12517490/101923
99 | config_dir = os.path.dirname(credentials_path)
100 | if not os.path.exists(config_dir):
101 | try:
102 | os.makedirs(config_dir)
103 | except OSError as exc: # Guard against race condition.
104 | if exc.errno != errno.EEXIST:
105 | logger.warning("Unable to create credentials directory.")
106 | return
107 |
108 | try:
109 | with open(credentials_path, "w") as credentials_file:
110 | credentials_json = {
111 | "refresh_token": credentials.refresh_token,
112 | "id_token": credentials.id_token,
113 | "token_uri": credentials.token_uri,
114 | "client_id": credentials.client_id,
115 | "client_secret": credentials.client_secret,
116 | "scopes": credentials.scopes,
117 | # Required for Application Default Credentials to detect the
118 | # credentials type. See:
119 | # https://github.com/pydata/pydata-google-auth/issues/22
120 | "type": "authorized_user",
121 | }
122 | json.dump(credentials_json, credentials_file)
123 | except IOError:
124 | logger.warning("Unable to save credentials.")
125 |
126 |
127 | def _load_service_account_credentials_from_file(credentials_path, **kwargs):
128 | try:
129 | with open(credentials_path) as credentials_file:
130 | credentials_json = json.load(credentials_file)
131 | except (IOError, ValueError) as exc:
132 | logger.debug(
133 | "Error loading credentials from {}: {}".format(credentials_path, str(exc))
134 | )
135 | return None
136 |
137 | return _load_service_account_credentials_from_info(credentials_json, **kwargs)
138 |
139 |
140 | def _load_service_account_credentials_from_info(credentials_json, **kwargs):
141 | credentials = service_account.Credentials.from_service_account_info(
142 | credentials_json, **kwargs
143 | )
144 | if not credentials.valid:
145 | request = google.auth.transport.requests.Request()
146 | try:
147 | credentials.refresh(request)
148 | except google.auth.exceptions.RefreshError as exc:
149 | # Credentials could be expired or revoked.
150 | logger.debug("Error refreshing credentials: {}".format(str(exc)))
151 | return None
152 |
153 | return credentials
154 |
155 |
156 | class CredentialsCache(object):
157 | """
158 | Shared base class for credentials cache classes.
159 |
160 | This class also functions as a no-op implementation of a credentials cache.
161 | """
162 |
163 | def load(self):
164 | """
165 | Load credentials from disk.
166 |
167 | Does nothing in this base class.
168 |
169 | Returns
170 | -------
171 | google.oauth2.credentials.Credentials, optional
172 | Returns user account credentials loaded from disk or ``None`` if no
173 | credentials could be found.
174 | """
175 | pass
176 |
177 | def save(self, credentials):
178 | """
179 | Write credentials to disk.
180 |
181 | Does nothing in this base class.
182 |
183 | Parameters
184 | ----------
185 | credentials : google.oauth2.credentials.Credentials
186 | User credentials to save to disk.
187 | """
188 | pass
189 |
190 |
191 | class ReadWriteCredentialsCache(CredentialsCache):
192 | """
193 | A :class:`~pydata_google_auth.cache.CredentialsCache` which writes to
194 | disk and reads cached credentials from disk.
195 |
196 | Parameters
197 | ----------
198 | dirname : str, optional
199 | Name of directory to write credentials to. This directory is created
200 | within the ``.config`` subdirectory of the ``HOME`` directory
201 | (or directly within ``APPDATA`` on Windows).
202 | filename : str, optional
203 | Name of the credentials file within the credentials directory.
204 | """
205 |
206 | def __init__(self, dirname=_DIRNAME, filename=_FILENAME):
207 | super(ReadWriteCredentialsCache, self).__init__()
208 | self._path = _get_default_credentials_path(dirname, filename)
209 |
210 | def load(self):
211 | """
212 | Load credentials from disk.
213 |
214 | Returns
215 | -------
216 | google.oauth2.credentials.Credentials, optional
217 | Returns user account credentials loaded from disk or ``None`` if no
218 | credentials could be found.
219 | """
220 | return _load_user_credentials_from_file(self._path)
221 |
222 | def save(self, credentials):
223 | """
224 | Write credentials to disk.
225 |
226 | Parameters
227 | ----------
228 | credentials : google.oauth2.credentials.Credentials
229 | User credentials to save to disk.
230 | """
231 | _save_user_account_credentials(credentials, self._path)
232 |
233 |
234 | class WriteOnlyCredentialsCache(CredentialsCache):
235 | """
236 | A :class:`~pydata_google_auth.cache.CredentialsCache` which writes to
237 | disk, but doesn't read from disk.
238 |
239 | Use this class to reauthorize against Google APIs and cache your
240 | credentials for later.
241 |
242 | Parameters
243 | ----------
244 | dirname : str, optional
245 | Name of directory to write credentials to. This directory is created
246 | within the ``.config`` subdirectory of the ``HOME`` directory
247 | (or directly within ``APPDATA`` on Windows).
248 | filename : str, optional
249 | Name of the credentials file within the credentials directory.
250 | """
251 |
252 | def __init__(self, dirname=_DIRNAME, filename=_FILENAME):
253 | super(WriteOnlyCredentialsCache, self).__init__()
254 | self._path = _get_default_credentials_path(dirname, filename)
255 |
256 | def save(self, credentials):
257 | """
258 | Write credentials to disk.
259 |
260 | Parameters
261 | ----------
262 | credentials : google.oauth2.credentials.Credentials
263 | User credentials to save to disk.
264 | """
265 | _save_user_account_credentials(credentials, self._path)
266 |
267 |
268 | NOOP = CredentialsCache()
269 | """
270 | No-op implementation of a credentials cache.
271 |
272 | This cache always reauthorizes and never saves credentials to disk.
273 | Recommended for shared machines.
274 | """
275 |
276 | READ_WRITE = ReadWriteCredentialsCache()
277 | """
278 | Write credentials to disk and read cached credentials from disk.
279 | """
280 |
281 | REAUTH = WriteOnlyCredentialsCache()
282 | """
283 | Write credentials to disk. Never read cached credentials from disk.
284 |
285 | Use this to reauthenticate and refresh the cached credentials.
286 | """
287 |
--------------------------------------------------------------------------------
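
A short sketch of combining the cache objects above with ``pydata_google_auth.get_user_credentials``; the directory and file names below are illustrative, not defaults:

    import pydata_google_auth
    import pydata_google_auth.cache

    SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

    # Store cached credentials under a tool-specific path, e.g.
    # ~/.config/my_tool/my_tool_credentials.json (or under APPDATA on Windows).
    custom_cache = pydata_google_auth.cache.ReadWriteCredentialsCache(
        dirname="my_tool",
        filename="my_tool_credentials.json",
    )
    credentials = pydata_google_auth.get_user_credentials(
        SCOPES,
        credentials_cache=custom_cache,
    )

    # Or avoid touching disk entirely (recommended on shared machines).
    credentials = pydata_google_auth.get_user_credentials(
        SCOPES,
        credentials_cache=pydata_google_auth.cache.NOOP,
    )
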
/pydata_google_auth/exceptions.py:
--------------------------------------------------------------------------------
1 | class PyDataCredentialsError(ValueError):
2 | """
3 | Raised when invalid credentials are provided, or tokens have expired.
4 | """
5 |
6 |
7 | class PyDataConnectionError(RuntimeError):
8 | """
9 | Raised when unable to fetch credentials due to connection error.
10 | """
11 |
--------------------------------------------------------------------------------
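
Both exception classes are raised by the helpers in ``auth.py`` and ``_webserver.py``; a brief, illustrative sketch of handling them:

    import pydata_google_auth
    from pydata_google_auth import exceptions

    SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

    try:
        credentials, project_id = pydata_google_auth.default(SCOPES)
    except exceptions.PyDataConnectionError:
        # No open port was found for the local OAuth 2.0 webserver.
        raise
    except exceptions.PyDataCredentialsError:
        # No valid credentials could be obtained or refreshed.
        credentials, project_id = None, None
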
/release-procedure.md:
--------------------------------------------------------------------------------
1 | # Releasing pydata-google-auth
2 |
3 | * Update the CHANGELOG. Example: https://github.com/pydata/pydata-google-auth/pull/60
4 |
5 | * Tag commit
6 |
7 | git tag -a x.x.x -m 'Version x.x.x'
8 |
9 | * and push to github
10 |
11 | git push upstream main --tags
12 |
13 | * Build the package
14 |
15 | git clean -xfd
16 | python setup.py sdist bdist_wheel
17 |
18 | * Upload to test PyPI
19 |
20 | twine upload --repository testpypi dist/*
21 |
22 | * Try out test PyPI package
23 |
24 | pip install --upgrade --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pydata-google-auth
25 |
26 | * Upload to PyPI
27 |
28 | twine upload dist/*
29 |
30 | * Do a pull-request to the feedstock on `pydata-google-auth-feedstock <https://github.com/conda-forge/pydata-google-auth-feedstock>`__
31 |
32 | update the version
33 | update the SHA256 (retrieve from PyPI)
34 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | flake8
2 | nox
3 | pytest
4 | pyfakefs
5 | setuptools
6 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | google-auth
2 | google-auth-oauthlib
3 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 |
2 | # See the docstring in versioneer.py for instructions. Note that you must
3 | # re-run 'versioneer.py setup' after changing this section, and commit the
4 | # resulting files.
5 |
6 | [versioneer]
7 | VCS = git
8 | style = pep440
9 | versionfile_source = pydata_google_auth/_version.py
10 | versionfile_build = pydata_google_auth/_version.py
11 | tag_prefix =
12 | parentdir_prefix = pydata_google_auth-
13 |
14 | [flake8]
15 | ignore = E731
16 |
17 | [isort]
18 | default_section=THIRDPARTY
19 | known_first_party=pydata_google_auth
20 | multi_line_output=4
21 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | import versioneer
5 | from setuptools import find_packages, setup
6 |
7 | NAME = "pydata-google-auth"
8 |
9 |
10 | # versioning
11 | cmdclass = versioneer.get_cmdclass()
12 |
13 |
14 | def readme():
15 | with open("README.rst") as f:
16 | return f.read()
17 |
18 |
19 | INSTALL_REQUIRES = [
20 | "setuptools",
21 | "google-auth >=1.25.0, <3.0dev",
22 | "google-auth-oauthlib >=0.4.0",
23 | ]
24 |
25 | setup(
26 | name=NAME,
27 | version=versioneer.get_version(),
28 | cmdclass=cmdclass,
29 | description="PyData helpers for authenticating to Google APIs",
30 | long_description=readme(),
31 | license="BSD License",
32 | author="The PyData Development Team",
33 | author_email="pydata@googlegroups.com",
34 | url="https://github.com/pydata/pydata-google-auth",
35 | classifiers=[
36 | "Development Status :: 5 - Production/Stable",
37 | "Environment :: Console",
38 | "Intended Audience :: Science/Research",
39 | "Operating System :: OS Independent",
40 | "Programming Language :: Python",
41 | "Programming Language :: Python :: 3.9",
42 | "Programming Language :: Python :: 3.10",
43 | "Programming Language :: Python :: 3.11",
44 | "Programming Language :: Python :: 3.12",
45 | "Programming Language :: Python :: 3.13",
46 | "Topic :: Scientific/Engineering",
47 | "License :: OSI Approved :: BSD License",
48 | ],
49 | keywords="data",
50 | install_requires=INSTALL_REQUIRES,
51 | packages=find_packages(exclude=["contrib", "docs", "tests*"]),
52 | test_suite="tests",
53 | python_requires=">=3.9",
54 | )
55 |
--------------------------------------------------------------------------------
/testing/.gitignore:
--------------------------------------------------------------------------------
1 | test-env.sh
2 | service-account.json
3 | client-secrets.json
--------------------------------------------------------------------------------
/testing/constraints-3.10.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.10.txt
--------------------------------------------------------------------------------
/testing/constraints-3.11.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.11.txt
--------------------------------------------------------------------------------
/testing/constraints-3.12.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.12.txt
--------------------------------------------------------------------------------
/testing/constraints-3.13.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.13.txt
--------------------------------------------------------------------------------
/testing/constraints-3.14.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.14.txt
--------------------------------------------------------------------------------
/testing/constraints-3.15.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/testing/constraints-3.15.txt
--------------------------------------------------------------------------------
/testing/constraints-3.9.txt:
--------------------------------------------------------------------------------
1 | # This constraints file is used to check that lower bounds
2 | # are correct in setup.py
3 | # List *all* library dependencies and extras in this file.
4 | # Pin the version to the lower bound.
5 | #
6 | # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
7 | # Then this file should have foo==1.14.0
8 | google-auth==1.25.0
9 | google-auth-oauthlib==0.4.0
10 |
11 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/tests/__init__.py
--------------------------------------------------------------------------------
/tests/system/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/tests/system/__init__.py
--------------------------------------------------------------------------------
/tests/system/test_auth.py:
--------------------------------------------------------------------------------
1 | """System tests for fetching Google API credentials."""
2 |
3 | try:
4 | import mock
5 | except ImportError: # pragma: NO COVER
6 | from unittest import mock
7 |
8 | from google.auth.exceptions import DefaultCredentialsError
9 |
10 |
11 | TEST_SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]
12 |
13 |
14 | def test_default_gets_valid_credentials():
15 | import pydata_google_auth
16 |
17 | credentials, _ = pydata_google_auth.default(TEST_SCOPES, use_local_webserver=True)
18 | assert credentials.valid
19 |
20 |
21 | def test_default_gets_user_credentials():
22 | import pydata_google_auth
23 |
24 | # Mock google.auth.default to fail, forcing user credentials.
25 | with mock.patch("google.auth.default", side_effect=DefaultCredentialsError()):
26 | credentials, _ = pydata_google_auth.default(
27 | TEST_SCOPES, use_local_webserver=True
28 | )
29 |
30 | assert credentials.valid
31 |
32 |
33 | def test_get_user_credentials_gets_valid_credentials():
34 | import pydata_google_auth
35 |
36 | credentials = pydata_google_auth.get_user_credentials(
37 | TEST_SCOPES, use_local_webserver=True
38 | )
39 |
40 | assert credentials.valid
41 |
42 |
43 | def test_get_user_credentials_noop_gets_valid_credentials():
44 | import pydata_google_auth
45 | import pydata_google_auth.cache
46 |
47 | credentials = pydata_google_auth.get_user_credentials(
48 | TEST_SCOPES,
49 | credentials_cache=pydata_google_auth.cache.NOOP,
50 | use_local_webserver=True,
51 | )
52 |
53 | assert credentials.valid
54 | assert credentials.has_scopes(TEST_SCOPES)
55 |
56 |
57 | def test_get_user_credentials_reauth_gets_valid_credentials():
58 | import pydata_google_auth
59 | import pydata_google_auth.cache
60 |
61 | credentials = pydata_google_auth.get_user_credentials(
62 | TEST_SCOPES,
63 | credentials_cache=pydata_google_auth.cache.REAUTH,
64 | use_local_webserver=True,
65 | )
66 |
67 | assert credentials.valid
68 | assert credentials.has_scopes(TEST_SCOPES)
69 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pydata/pydata-google-auth/ab923498a4f923c060c54e3b3913d002fc15bbfc/tests/unit/__init__.py
--------------------------------------------------------------------------------
/tests/unit/test_auth.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | try:
4 | from unittest import mock
5 | except ImportError: # pragma: NO COVER
6 | import mock
7 |
8 | import google.auth
9 | import google.auth.credentials
10 | import google.oauth2.credentials
11 | import pytest
12 |
13 | from google.oauth2 import service_account
14 | from pydata_google_auth import exceptions
15 | import pydata_google_auth.cache
16 |
17 |
18 | TEST_SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]
19 |
20 |
21 | class FakeCredentials(object):
22 | @property
23 | def valid(self):
24 | return True
25 |
26 |
27 | @pytest.fixture
28 | def module_under_test():
29 | from pydata_google_auth import auth
30 |
31 | return auth
32 |
33 |
34 | def test_default_returns_google_auth_credentials(monkeypatch, module_under_test):
35 | def mock_default_credentials(scopes=None, request=None):
36 | return (
37 | mock.create_autospec(google.auth.credentials.Credentials),
38 | "default-project",
39 | )
40 |
41 | monkeypatch.setattr(google.auth, "default", mock_default_credentials)
42 |
43 | credentials, project = module_under_test.default(TEST_SCOPES)
44 | assert project == "default-project"
45 | assert credentials is not None
46 |
47 |
48 | def test_default_loads_user_credentials(monkeypatch, module_under_test):
49 | from pydata_google_auth import cache
50 |
51 | def mock_default_credentials(scopes=None, request=None):
52 | return (None, None)
53 |
54 | monkeypatch.setattr(google.auth, "default", mock_default_credentials)
55 |
56 | mock_cache = mock.create_autospec(cache.CredentialsCache)
57 | mock_user_credentials = mock.create_autospec(google.oauth2.credentials.Credentials)
58 | mock_cache.load.return_value = mock_user_credentials
59 |
60 | credentials, project = module_under_test.default(
61 | TEST_SCOPES, credentials_cache=mock_cache
62 | )
63 | assert project is None
64 | assert credentials is mock_user_credentials
65 |
66 |
67 | def test_get_user_credentials_tries_colab_first(monkeypatch, module_under_test):
68 | colab_auth_module = mock.Mock()
69 | try_colab_auth_import = mock.Mock(return_value=colab_auth_module)
70 | monkeypatch.setattr(
71 | module_under_test, "try_colab_auth_import", try_colab_auth_import
72 | )
73 | default_credentials = mock.create_autospec(google.auth.credentials.Credentials)
74 | default_call_times = 0
75 |
76 | # Can't use a Mock because we want to check authenticate_user.
77 | def mock_default(scopes=None, request=None):
78 | nonlocal default_call_times
79 | default_call_times += 1
80 |
81 | # Make sure colab auth is called first.
82 | colab_auth_module.authenticate_user.assert_called_once_with()
83 |
84 | return (
85 | default_credentials,
86 | "colab-project", # In reality, often None.
87 | )
88 |
89 | monkeypatch.setattr(google.auth, "default", mock_default)
90 |
91 | credentials = module_under_test.get_user_credentials(TEST_SCOPES)
92 |
93 | assert credentials is default_credentials
94 | assert default_call_times == 1
95 |
96 |
97 | def test_get_user_credentials_skips_colab_if_no_colab(monkeypatch, module_under_test):
98 | try_colab_auth_import = mock.Mock(return_value=None)
99 | monkeypatch.setattr(
100 | module_under_test, "try_colab_auth_import", try_colab_auth_import
101 | )
102 | credentials_cache = mock.create_autospec(pydata_google_auth.cache.CredentialsCache)
103 | loaded_credentials = mock.Mock()
104 | credentials_cache.load.return_value = loaded_credentials
105 |
106 | credentials = module_under_test.get_user_credentials(
107 | TEST_SCOPES,
108 | credentials_cache=credentials_cache,
109 | )
110 |
111 | assert credentials is loaded_credentials
112 |
113 |
114 | def test_load_service_account_credentials(monkeypatch, tmp_path, module_under_test):
115 | creds_path = str(tmp_path / "creds.json")
116 | with open(creds_path, "w") as stream:
117 | stream.write("{}")
118 |
119 | fake_creds = FakeCredentials()
120 | mock_service = mock.create_autospec(service_account.Credentials)
121 | mock_service.from_service_account_info.return_value = fake_creds
122 | monkeypatch.setattr(service_account, "Credentials", mock_service)
123 |
124 | creds = module_under_test.load_service_account_credentials(creds_path)
125 | assert creds is fake_creds
126 |
127 |
128 | def test_load_user_credentials_raises_when_file_doesnt_exist(module_under_test):
129 | with pytest.raises(exceptions.PyDataCredentialsError):
130 | module_under_test.load_user_credentials("path/not/found.json")
131 |
132 |
133 | def test_load_service_account_credentials_raises_when_file_doesnt_exist(
134 | module_under_test,
135 | ):
136 | with pytest.raises(exceptions.PyDataCredentialsError):
137 | module_under_test.load_service_account_credentials("path/not/found.json")
138 |
--------------------------------------------------------------------------------
/tests/unit/test_cache.py:
--------------------------------------------------------------------------------
1 | """Test module for pydata_google_auth.cache"""
2 |
3 | import json
4 | import os
5 | import os.path
6 |
7 | import pytest
8 |
9 | import google.oauth2.credentials
10 |
11 | try:
12 | from importlib import reload
13 |
14 | except ImportError: # Py2 compat
15 | from six.moves import reload_module as reload
16 |
17 |
18 | @pytest.fixture
19 | def module_under_test():
20 | from pydata_google_auth import cache
21 |
22 | return cache
23 |
24 |
25 | def test_import_unwriteable_fs(module_under_test, monkeypatch):
26 | """Test import with an unwritable filesystem.
27 |
28 | See: https://github.com/pydata/pydata-google-auth/issues/10
29 | """
30 |
31 | def raise_unwriteable(path):
32 | raise PermissionError()
33 |
34 | monkeypatch.setattr(os.path, "exists", lambda _: False)
35 | monkeypatch.setattr(os, "makedirs", raise_unwriteable)
36 |
37 | reload(module_under_test)
38 |
39 | assert module_under_test.NOOP is not None
40 |
41 |
42 | def test__get_default_credentials_path_windows_wo_appdata(
43 | module_under_test, monkeypatch
44 | ):
45 | # Ensure default path returns something sensible on Windows, even if
46 | # APPDATA is not set. See:
47 | # https://github.com/pydata/pydata-google-auth/issues/29
48 | monkeypatch.setattr(os, "name", "nt")
49 | monkeypatch.delenv("APPDATA", raising=False)
50 |
51 | creds_path = module_under_test._get_default_credentials_path("dirname", "filename")
52 | assert creds_path is not None
53 |
54 |
55 | def test__save_user_account_credentials_wo_directory(module_under_test, fs):
56 | """Directories should be created if they don't exist."""
57 |
58 | credentials = google.oauth2.credentials.Credentials(
59 | token="access_token",
60 | refresh_token="refresh_token",
61 | id_token="id_token",
62 | token_uri="token_uri",
63 | client_id="client_id",
64 | client_secret="client_secret",
65 | scopes=["scopes"],
66 | )
67 | path = "/home/username/.config/pydata/pydata_google_credentials.json"
68 | assert not os.path.exists("/home/username/.config/pydata/")
69 |
70 | module_under_test._save_user_account_credentials(credentials, path)
71 |
72 | with open(path) as fp:
73 | serialized_data = json.load(fp)
74 | assert serialized_data["refresh_token"] == "refresh_token"
75 |
76 | # Set the type so that the cached credentials file can be used as
77 | # application default credentials. See:
78 | # https://github.com/pydata/pydata-google-auth/issues/22
79 | assert serialized_data["type"] == "authorized_user"
80 |
81 |
82 | def test_ReadWriteCredentialsCache_sets_path(module_under_test):
83 | """ReadWriteCredentialsCache ctor should respect dirname and filename.
84 |
85 | See: https://github.com/pydata/pydata-google-auth/issues/16
86 | """
87 | cache = module_under_test.ReadWriteCredentialsCache(
88 | dirname="dirtest", filename="filetest.json"
89 | )
90 | path = os.path.normpath(cache._path)
91 | parts = path.split(os.sep)
92 | assert parts[-2] == "dirtest"
93 | assert parts[-1] == "filetest.json"
94 |
95 |
96 | def test_WriteOnlyCredentialsCache_sets_path(module_under_test):
97 | """ReadWriteCredentialsCache ctor should respect dirname and filename.
98 |
99 | See: https://github.com/pydata/pydata-google-auth/issues/16
100 | """
101 | cache = module_under_test.WriteOnlyCredentialsCache(
102 | dirname="dirtest", filename="filetest.json"
103 | )
104 | path = os.path.normpath(cache._path)
105 | parts = path.split(os.sep)
106 | assert parts[-2] == "dirtest"
107 | assert parts[-1] == "filetest.json"
108 |
--------------------------------------------------------------------------------
/tests/unit/test_webserver.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import socket
4 |
5 | try:
6 | from unittest import mock
7 | except ImportError: # pragma: NO COVER
8 | import mock
9 |
10 | import google_auth_oauthlib.flow
11 | import pytest
12 |
13 | from pydata_google_auth import exceptions
14 |
15 |
16 | @pytest.fixture
17 | def module_under_test():
18 | from pydata_google_auth import _webserver
19 |
20 | return _webserver
21 |
22 |
23 | def test_find_open_port_finds_start_port(monkeypatch, module_under_test):
24 | monkeypatch.setattr(socket, "socket", mock.create_autospec(socket.socket))
25 | port = module_under_test.find_open_port(9999)
26 | assert port == 9999
27 |
28 |
29 | def test_find_open_port_finds_stop_port(monkeypatch, module_under_test):
30 | socket_instance = mock.create_autospec(socket.socket, instance=True)
31 |
32 | def mock_socket(family, type_):
33 | return socket_instance
34 |
35 | monkeypatch.setattr(socket, "socket", mock_socket)
36 | socket_instance.listen.side_effect = [socket.error] * 99 + [None]
37 | port = module_under_test.find_open_port(9000, stop=9100)
38 | assert port == 9099
39 |
40 |
41 | def test_find_open_port_returns_none(monkeypatch, module_under_test):
42 | socket_instance = mock.create_autospec(socket.socket, instance=True)
43 |
44 | def mock_socket(family, type_):
45 | return socket_instance
46 |
47 | monkeypatch.setattr(socket, "socket", mock_socket)
48 | socket_instance.listen.side_effect = socket.error
49 | port = module_under_test.find_open_port(9000)
50 | assert port is None
51 | socket_instance.listen.assert_has_calls(mock.call(1) for _ in range(100))
52 |
53 |
54 | def test_run_local_server_calls_flow(monkeypatch, module_under_test):
55 | mock_flow = mock.create_autospec(
56 | google_auth_oauthlib.flow.InstalledAppFlow, instance=True
57 | )
58 | module_under_test.run_local_server(mock_flow)
59 | mock_flow.run_local_server.assert_called_once_with(host="localhost", port=mock.ANY)
60 |
61 |
62 | def test_run_local_server_raises_connectionerror(monkeypatch, module_under_test):
63 | def mock_find_open_port():
64 | return None
65 |
66 | monkeypatch.setattr(module_under_test, "find_open_port", mock_find_open_port)
67 | mock_flow = mock.create_autospec(
68 | google_auth_oauthlib.flow.InstalledAppFlow, instance=True
69 | )
70 |
71 | with pytest.raises(exceptions.PyDataConnectionError):
72 | module_under_test.run_local_server(mock_flow)
73 |
74 | mock_flow.run_local_server.assert_not_called()
75 |
--------------------------------------------------------------------------------