├── .coveragerc
├── .devcontainer
│   ├── Dockerfile
│   ├── devcontainer.json
│   └── local_build.sh
├── .gitattributes
├── .github
│   ├── CONTRIBUTING.rst
│   ├── actions
│   │   └── install_requirements
│   │       └── action.yml
│   ├── dependabot.yml
│   ├── pages
│   │   ├── index.html
│   │   └── make_switcher.py
│   └── workflows
│       ├── code.yml
│       ├── docs.yml
│       └── docs_clean.yml
├── .gitignore
├── .gitremotes
├── .pre-commit-config.yaml
├── .vscode
│   ├── extensions.json
│   ├── launch.json
│   ├── settings.json
│   └── tasks.json
├── AUTHORS
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.rst
├── Dockerfile
├── LICENSE
├── README.rst
├── docs
│   ├── .notes
│   ├── _static
│   │   └── theme_overrides.css
│   ├── conf.py
│   ├── explanations.rst
│   ├── explanations
│   │   ├── folders.rst
│   │   ├── googleapiissues.rst
│   │   ├── notes.rst
│   │   └── tokens.rst
│   ├── how-to.rst
│   ├── how-to
│   │   ├── comparison.rst
│   │   ├── cron.rst
│   │   └── windows.rst
│   ├── images
│   │   └── logo.png
│   ├── index.rst
│   ├── reference.rst
│   ├── reference
│   │   ├── api.rst
│   │   └── contributing.rst
│   ├── tutorials.rst
│   └── tutorials
│       ├── installation.rst
│       ├── login-images
│       │   ├── 01-sign-in.png
│       │   ├── 02-verify.png
│       │   ├── 03-verify2.png
│       │   └── 04-access.png
│       ├── login.rst
│       ├── oauth-images
│       │   ├── 0.png
│       │   ├── 1.png
│       │   ├── 10-test_users.png
│       │   ├── 11-summary.png
│       │   ├── 12-create_creds.png
│       │   ├── 14-create_id.png
│       │   ├── 15-created.png
│       │   ├── 2.png
│       │   ├── 3.png
│       │   ├── 4.png
│       │   ├── 5.png
│       │   ├── 6.png
│       │   ├── 7-oauth_concent.png
│       │   ├── 8-app_registration.png
│       │   └── 9-scopes.png
│       └── oauth2.rst
├── pyproject.toml
├── src
│   └── gphotos_sync
│       ├── BadIds.py
│       ├── BaseMedia.py
│       ├── Checks.py
│       ├── DatabaseMedia.py
│       ├── DbRow.py
│       ├── GoogleAlbumMedia.py
│       ├── GoogleAlbumsRow.py
│       ├── GoogleAlbumsSync.py
│       ├── GooglePhotosDownload.py
│       ├── GooglePhotosIndex.py
│       ├── GooglePhotosMedia.py
│       ├── GooglePhotosRow.py
│       ├── LocalData.py
│       ├── LocalFilesMedia.py
│       ├── LocalFilesRow.py
│       ├── LocalFilesScan.py
│       ├── Logging.py
│       ├── Queries.py
│       ├── Settings.py
│       ├── Utils.py
│       ├── __init__.py
│       ├── __main__.py
│       ├── authorize.py
│       ├── restclient.py
│       └── sql
│           ├── console.sql
│           └── gphotos_create.sql
└── tests
    ├── __init__.py
    ├── test-data
    │   ├── 1987-JohnWoodAndGiles.jpg
    │   ├── 20180126_185832.jpg
    │   ├── IMG_20180908_132733-gphotos.jpg
    │   ├── IMG_20180908_132733-insync.jpg
    │   ├── IMG_20190102_112832.jpg
    │   ├── PIC00002 (2).jpg
    │   └── PIC00002.jpg
    ├── test_account.py
    ├── test_boilerplate_removed.py
    ├── test_cli.py
    ├── test_credentials
    │   ├── .gphotos.token
    │   └── client_secret.json
    ├── test_full_library.py
    ├── test_setup.py
    ├── test_system
    │   ├── __init__.py
    │   ├── test_database.py
    │   ├── test_network.py
    │   ├── test_regression.py
    │   ├── test_requests.py
    │   └── test_system.py
    └── test_units
        ├── __init__.py
        ├── test_errors.py
        ├── test_local_scan.py
        └── test_units.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit = tests/*
3 |
4 | [report]
5 | # Regexes for lines to exclude from consideration
6 | exclude_lines =
7 | # Have to re-enable the standard pragma
8 | pragma: no cover
9 |
10 | # Don't complain about missing debug-only code:
11 | def __repr__
12 | if self\.debug
13 |
14 | # Don't complain if tests don't hit defensive assertion code:
15 | raise AssertionError
16 | raise NotImplementedError
17 |
--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | # ideas from https://www.docker.com/blog/containerized-python-development-part-1/
2 |
3 | # This file is for use as a .vscode devcontainer as well as a runtime
4 | # container. The devcontainer should be rootful and use podman or docker
5 | # with user namespaces.
6 |
7 | ARG BASE="mcr.microsoft.com/devcontainers/python:dev-3.12-bullseye"
8 | FROM ${BASE} as base
9 |
10 | # use root to pin where the packages will install
11 | USER root
12 | ENV PATH=/root/.local/bin:$PATH
13 |
14 | FROM base as developer
15 |
16 | ARG DEBIAN_FRONTEND=noninteractive
17 | RUN apt-get update && \
18 | apt-get install -y --no-install-recommends \
19 | gcc python3-dev && \
20 | rm -rf /var/lib/apt/lists/*
21 |
22 | WORKDIR /workspace
23 | COPY . .
24 |
25 | # install runtime from DIST if there is one
26 | RUN mkdir -vp /root/.local && \
27 | if [ -d dist ] ; then \
28 | touch requirements.txt && \
29 | pip install --no-cache --user -r requirements.txt dist/*.whl ; \
30 | fi
31 |
32 | FROM base as runtime
33 |
34 | COPY --from=developer /root/.local /root/.local
35 |
36 | RUN mkdir -vp /root/.config /config \
37 | && ln -vs /config /root/.config/gphotos-sync \
38 | && mkdir -vp /storage
39 |
40 | # make the installed version of gphotos-sync available to non root users
41 | RUN chmod -R a+rx /root
42 | ENV HOME=/root
43 |
44 | VOLUME /config /storage
45 |
46 | ENTRYPOINT ["gphotos-sync"]
47 | CMD ["--version"]
48 |
49 | EXPOSE 8080
50 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://containers.dev/implementors/json_reference/
2 | {
3 | "name": "Python 3 Developer Container",
4 | "build": {
5 | "dockerfile": "../Dockerfile",
6 | "target": "build",
7 | // Only upgrade pip, we will install the project below
8 | "args": {
9 | "PIP_OPTIONS": "--upgrade pip"
10 | }
11 | },
12 | "remoteEnv": {
13 | "DISPLAY": "${localEnv:DISPLAY}"
14 | },
15 | // Add the URLs of features you want added when the container is built.
16 | "features": {
17 | "ghcr.io/devcontainers/features/common-utils:1": {
18 | "username": "none",
19 | "upgradePackages": false
20 | }
21 | },
22 | "customizations": {
23 | "vscode": {
24 | // Add the IDs of extensions you want installed when the container is created.
25 | "extensions": [
26 | "ms-python.python",
27 | "tamasfe.even-better-toml",
28 | "redhat.vscode-yaml",
29 | "ryanluker.vscode-coverage-gutters"
30 | ]
31 | }
32 | },
33 | // Make sure the files we are mapping into the container exist on the host
34 | "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'",
35 | "runArgs": [
36 | "--net=host",
37 | "--security-opt=label=type:container_runtime_t"
38 | ],
39 | "mounts": [
40 | "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind",
41 | "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind",
42 | // map in home directory - not strictly necessary but useful
43 | "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached"
44 | ],
45 | // make the workspace folder the same inside and outside of the container
46 | "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind",
47 | "workspaceFolder": "${localWorkspaceFolder}",
48 | // After the container is created, install the python project in editable form
49 | "postCreateCommand": "pip install -e '.[dev]'"
50 | }
--------------------------------------------------------------------------------
/.devcontainer/local_build.sh:
--------------------------------------------------------------------------------
1 | # locally build a runtime container for testing
2 |
3 | THIS_DIR=$(dirname $(realpath $0))
4 | PYTHON_ROOT=$(realpath $THIS_DIR/..)
5 |
6 | # first make sure a wheel is built
7 | (
8 | cd ${PYTHON_ROOT}
9 | pip install build
10 | rm -r dist
11 | python -m build --wheel
12 | )
13 |
14 | # make the container name the same as the root folder name of this clone
15 | container_name=$(cd ${PYTHON_ROOT} ; basename $(realpath .))
16 | echo building $container_name ...
17 |
18 | # run the build with required build-args for a runtime build
19 | cd ${THIS_DIR}
20 | ln -s ../dist .
21 | docker build --build-arg BASE=python:3.12-slim -t $container_name .. --file ./Dockerfile
22 | unlink dist
23 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | src/*/_version_git.py export-subst
2 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing to the project
2 | ===========================
3 |
4 | Contributions and issues are most welcome! All issues and pull requests are
5 | handled through GitHub_. Also, please check for any existing issues before
6 | filing a new one. If you have a great idea but it involves big changes, please
7 | file a ticket before making a pull request! We want to make sure you don't spend
8 | your time coding something that might not fit the scope of the project.
9 |
10 | .. _GitHub: https://github.com/gilesknap/gphotos-sync/issues
11 |
12 | Issue or Discussion?
13 | --------------------
14 |
15 | GitHub also offers discussions_ as a place to ask questions and share ideas. If
16 | your issue is open ended and it is not obvious when it can be "closed", please
17 | raise it as a discussion instead.
18 |
19 | .. _discussions: https://github.com/gilesknap/gphotos-sync/discussions
20 |
21 | Code coverage
22 | -------------
23 |
24 | While 100% code coverage does not make a library bug-free, it significantly
25 | reduces the number of easily caught bugs! Please make sure coverage remains the
26 | same or is improved by a pull request!
27 |
28 | Developer guide
29 | ---------------
30 |
31 | The `Developer Guide`_ contains information on setting up a development
32 | environment, running the tests and what standards the code and documentation
33 | should follow.
34 |
35 | .. _Developer Guide: https://diamondlightsource.github.io/gphotos-sync/main/developer/how-to/contribute.html
36 |
--------------------------------------------------------------------------------
/.github/actions/install_requirements/action.yml:
--------------------------------------------------------------------------------
1 | name: Install requirements
2 | description: Run pip install with requirements and upload resulting requirements
3 | inputs:
4 | requirements_file:
5 | description: Name of requirements file to use and upload
6 | required: true
7 | install_options:
8 | description: Parameters to pass to pip install
9 | required: true
10 | python_version:
11 | description: Python version to install
12 | default: "3.x"
13 |
14 | runs:
15 | using: composite
16 |
17 | steps:
18 | - name: Setup python
19 | uses: actions/setup-python@v4
20 | with:
21 | python-version: ${{ inputs.python_version }}
22 |
23 | - name: Pip install
24 | run: |
25 | touch ${{ inputs.requirements_file }}
26 | # -c uses requirements.txt as constraints, see 'Validate requirements file'
27 | pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }}
28 | shell: bash
29 |
30 | - name: Create lockfile
31 | run: |
32 | mkdir -p lockfiles
33 | pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }}
34 | # delete the self referencing line and make sure it isn't blank
35 | sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }}
36 | shell: bash
37 |
38 | - name: Upload lockfiles
39 | uses: actions/upload-artifact@v3
40 | with:
41 | name: lockfiles
42 | path: lockfiles
43 |
44 | # This eliminates the class of problems where the requirements being given no
45 | # longer match what the packages themselves dictate. E.g. In the rare instance
46 | # where I install some-package which used to depend on vulnerable-dependency
47 | # but now uses good-dependency (despite being nominally the same version)
48 | # pip will install both if given a requirements file with -r
49 | - name: If requirements file exists, check it matches pip installed packages
50 | run: |
51 | if [ -s ${{ inputs.requirements_file }} ]; then
52 | if ! diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then
53 |             echo "Error: ${{ inputs.requirements_file }} needs the above changes to be exhaustive"
54 | exit 1
55 | fi
56 | fi
57 | shell: bash
58 |
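59 | # Example caller step, mirroring the real usage in .github/workflows/code.yml:
60 | #
61 | #   - name: Install python packages
62 | #     uses: ./.github/actions/install_requirements
63 | #     with:
64 | #       requirements_file: requirements-dev-3.x.txt
65 | #       install_options: -e .[dev]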
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions"
9 | directory: "/"
10 | schedule:
11 | interval: "weekly"
12 |
13 | - package-ecosystem: "pip"
14 | directory: "/"
15 | schedule:
16 | interval: "weekly"
17 |
--------------------------------------------------------------------------------
/.github/pages/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 |   <head>
4 |     <title>Redirecting to main branch</title>
5 |     <meta charset="utf-8">
6 |     <meta http-equiv="refresh" content="0; url=./main/index.html">
7 |     <link rel="canonical" href="main/index.html">
8 |   </head>
9 | </html>
--------------------------------------------------------------------------------
/.github/pages/make_switcher.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from argparse import ArgumentParser
4 | from pathlib import Path
5 | from subprocess import CalledProcessError, check_output
6 | from typing import List, Optional
7 |
8 |
9 | def report_output(stdout: bytes, label: str) -> List[str]:
10 | ret = stdout.decode().strip().split("\n")
11 | print(f"{label}: {ret}")
12 | return ret
13 |
14 |
15 | def get_branch_contents(ref: str) -> List[str]:
16 | """Get the list of directories in a branch."""
17 | stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref])
18 | return report_output(stdout, "Branch contents")
19 |
20 |
21 | def get_sorted_tags_list() -> List[str]:
22 | """Get a list of sorted tags in descending order from the repository."""
23 | stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"])
24 | return report_output(stdout, "Tags list")
25 |
26 |
27 | def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]:
28 | """Generate the file containing the list of all GitHub Pages builds."""
29 | # Get the directories (i.e. builds) from the GitHub Pages branch
30 | try:
31 | builds = set(get_branch_contents(ref))
32 | except CalledProcessError:
33 | builds = set()
34 | logging.warning(f"Cannot get {ref} contents")
35 |
36 | # Add and remove from the list of builds
37 | if add:
38 | builds.add(add)
39 | if remove:
40 | assert remove in builds, f"Build '{remove}' not in {sorted(builds)}"
41 | builds.remove(remove)
42 |
43 | # Get a sorted list of tags
44 | tags = get_sorted_tags_list()
45 |
46 | # Make the sorted versions list from main branches and tags
47 | versions: List[str] = []
48 | for version in ["master", "main"] + tags:
49 | if version in builds:
50 | versions.append(version)
51 | builds.remove(version)
52 |
53 | # Add in anything that is left to the bottom
54 | versions += sorted(builds)
55 | print(f"Sorted versions: {versions}")
56 | return versions
57 |
58 |
59 | def write_json(path: Path, repository: str, versions: List[str]):
60 | org, repo_name = repository.split("/")
61 | struct = [
62 | {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"}
63 | for version in versions
64 | ]
65 | text = json.dumps(struct, indent=2)
66 | print(f"JSON switcher:\n{text}")
67 | path.write_text(text, encoding="utf-8")
68 |
69 |
70 | def main(args=None):
71 | parser = ArgumentParser(
72 |         description="Make a versions switcher.json file from gh-pages directories"
73 | )
74 | parser.add_argument(
75 | "--add",
76 | help="Add this directory to the list of existing directories",
77 | )
78 | parser.add_argument(
79 | "--remove",
80 | help="Remove this directory from the list of existing directories",
81 | )
82 | parser.add_argument(
83 | "repository",
84 | help="The GitHub org and repository name: ORG/REPO",
85 | )
86 | parser.add_argument(
87 | "output",
88 | type=Path,
89 |         help="Path to write switcher.json to",
90 | )
91 | args = parser.parse_args(args)
92 |
93 | # Write the versions file
94 | versions = get_versions("origin/gh-pages", args.add, args.remove)
95 | write_json(args.output, args.repository, versions)
96 |
97 |
98 | if __name__ == "__main__":
99 | main()
100 |
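101 | # Example invocation (this mirrors the call in .github/workflows/docs.yml,
102 | # run from a clone that has an origin/gh-pages branch):
103 | #
104 | #   python .github/pages/make_switcher.py --add main gilesknap/gphotos-sync \
105 | #       .github/pages/switcher.json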
--------------------------------------------------------------------------------
/.github/workflows/code.yml:
--------------------------------------------------------------------------------
1 | name: Code CI
2 |
3 | on:
4 | push:
5 | pull_request:
6 | env:
7 | # The target python version, which must match the Dockerfile version
8 | CONTAINER_PYTHON: "3.12"
9 |
10 | jobs:
11 | lint:
12 | # pull requests are a duplicate of a branch push if within the same repo.
13 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - name: Checkout
18 | uses: actions/checkout@v4
19 |
20 | - name: Install python packages
21 | uses: ./.github/actions/install_requirements
22 | with:
23 | python_version: "3.12"
24 | requirements_file: requirements-dev-3.x.txt
25 | install_options: -e .[dev]
26 |
27 | - name: Lint
28 | run: tox -e pre-commit,mypy
29 |
30 | test:
31 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | os: ["ubuntu-latest"] # can add windows-latest, macos-latest
36 | python: ["3.9", "3.10", "3.11", "3.12"]
37 | install: ["-e .[dev]"]
38 | # Make one version be non-editable to test both paths of version code
39 | include:
40 | - os: "ubuntu-latest"
41 | python: "3.8"
42 | install: ".[dev]"
43 |
44 | runs-on: ${{ matrix.os }}
45 | env:
46 | # https://github.com/pytest-dev/pytest/issues/2042
47 | PY_IGNORE_IMPORTMISMATCH: "1"
48 |
49 | steps:
50 | - name: Checkout
51 | uses: actions/checkout@v4
52 | with:
53 | # Need this to get version number from last tag
54 | fetch-depth: 0
55 |
56 | - name: Install python packages
57 | uses: ./.github/actions/install_requirements
58 | with:
59 | python_version: ${{ matrix.python }}
60 | requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt
61 | install_options: ${{ matrix.install }}
62 |
63 | - name: List dependency tree
64 | run: pipdeptree
65 |
66 | - name: Run tests
67 | run: tox -e pytest
68 |
69 | - name: Upload coverage to Codecov
70 | uses: codecov/codecov-action@v3
71 | with:
72 | name: ${{ matrix.python }}/${{ matrix.os }}
73 | files: cov.xml
74 |
75 | dist:
76 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
77 | runs-on: "ubuntu-latest"
78 |
79 | steps:
80 | - name: Checkout
81 | uses: actions/checkout@v4
82 | with:
83 | # Need this to get version number from last tag
84 | fetch-depth: 0
85 |
86 | - name: Build sdist and wheel
87 | run: |
88 | export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \
89 | pipx run build
90 |
91 | - name: Upload sdist and wheel as artifacts
92 | uses: actions/upload-artifact@v3
93 | with:
94 | name: dist
95 | path: dist
96 |
97 | - name: Check for packaging errors
98 | run: pipx run twine check --strict dist/*
99 |
100 | - name: Install python packages
101 | uses: ./.github/actions/install_requirements
102 | with:
103 | python_version: ${{env.CONTAINER_PYTHON}}
104 | requirements_file: requirements.txt
105 | install_options: dist/*.whl
106 |
107 | - name: Test module --version works using the installed wheel
108 | # Check that the command line entry point works
109 | run: gphotos-sync --version
110 |
111 | release:
112 | # upload to PyPI and make a release on every tag
113 | needs: [lint, dist, test]
114 | if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }}
115 | runs-on: ubuntu-latest
116 | env:
117 | HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }}
118 |
119 | steps:
120 | - uses: actions/download-artifact@v3
121 |
122 | - name: Fixup blank lockfiles
123 | # Github release artifacts can't be blank
124 | run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done
125 |
126 | - name: Github Release
127 | # We pin to the SHA, not the tag, for security reasons.
128 | # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
129 | uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15
130 | with:
131 | prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }}
132 | files: |
133 | dist/*
134 | lockfiles/*
135 | generate_release_notes: true
136 | env:
137 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
138 |
139 | - name: Publish to PyPI
140 | if: ${{ env.HAS_PYPI_TOKEN }}
141 | uses: pypa/gh-action-pypi-publish@release/v1
142 | with:
143 | password: ${{ secrets.PYPI_TOKEN }}
144 |
145 | make-container:
146 | needs: [lint, dist, test]
147 | runs-on: ubuntu-latest
148 | permissions:
149 | contents: read
150 | packages: write
151 |
152 | steps:
153 | - name: checkout
154 | uses: actions/checkout@v2
155 |
156 | - uses: actions/download-artifact@v2
157 | with:
158 | name: dist
159 | path: dist
160 |
161 | - name: Cache Docker layers
162 | uses: actions/cache@v2
163 | with:
164 | path: /tmp/.buildx-cache
165 | key: ${{ runner.os }}-buildx-${{ github.sha }}
166 | restore-keys: |
167 | ${{ runner.os }}-buildx-
168 |
169 | - name: Login to Docker Hub
170 | uses: docker/login-action@v1
171 | with:
172 | username: ${{ secrets.DOCKERHUB_USERNAME }}
173 | password: ${{ secrets.DOCKERHUB_TOKEN }}
174 |
175 | - name: Log in to GitHub Docker Registry
176 | uses: docker/login-action@v1
177 | with:
178 | registry: ghcr.io
179 | username: ${{ github.actor }}
180 | password: ${{ secrets.GITHUB_TOKEN }}
181 |
182 | - name: Docker meta
183 | id: meta
184 | uses: docker/metadata-action@v4
185 | with:
186 | images: |
187 | ghcr.io/${{ github.repository }}
188 | # github repo and dockerhub tag must match for this to work
189 | ${{ github.repository }}
190 | # all pull requests share a single tag 'pr'
191 | tags: |
192 | type=ref,event=branch
193 | type=ref,event=tag
194 | type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
195 | type=raw,value=pr
196 |
197 | # required for multi-arch build
198 | - name: Set up QEMU
199 | uses: docker/setup-qemu-action@v1
200 |
201 | - name: Set up Docker Buildx
202 | id: buildx
203 | uses: docker/setup-buildx-action@v1
204 |
205 | - name: Build runtime image
206 | uses: docker/build-push-action@v3
207 | with:
208 | file: .devcontainer/Dockerfile
209 | context: .
210 | platforms: linux/amd64,linux/arm/v7,linux/arm64/v8
211 | push: true
212 | build-args: BASE=python:3.12-slim
213 | tags: ${{ steps.meta.outputs.tags }}
214 | labels: ${{ steps.meta.outputs.labels }}
215 | cache-from: type=local,src=/tmp/.buildx-cache
216 | cache-to: type=local,dest=/tmp/.buildx-cache
217 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Docs CI
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 | docs:
9 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - name: Avoid git conflicts when tag and branch pushed at same time
14 | if: startsWith(github.ref, 'refs/tags')
15 | run: sleep 60
16 |
17 | - name: Checkout
18 | uses: actions/checkout@v4
19 | with:
20 | # Need this to get version number from last tag
21 | fetch-depth: 0
22 |
23 | - name: Install system packages
24 | # Can delete this if you don't use graphviz in your docs
25 | run: sudo apt-get install graphviz
26 |
27 | - name: Install python packages
28 | uses: ./.github/actions/install_requirements
29 | with:
30 | requirements_file: requirements-dev-3.x.txt
31 | install_options: -e .[dev]
32 |
33 | - name: Build docs
34 | run: tox -e docs
35 |
36 | - name: Sanitize ref name for docs version
37 | run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV
38 |
39 | - name: Move to versioned directory
40 | run: mv build/html .github/pages/$DOCS_VERSION
41 |
42 | - name: Write switcher.json
43 | run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json
44 |
45 | - name: Publish Docs to gh-pages
46 | if: github.event_name == 'push' && github.actor != 'dependabot[bot]'
47 | # We pin to the SHA, not the tag, for security reasons.
48 | # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
49 | uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1
50 | with:
51 | github_token: ${{ secrets.GITHUB_TOKEN }}
52 | publish_dir: .github/pages
53 | keep_files: true
54 |
--------------------------------------------------------------------------------
/.github/workflows/docs_clean.yml:
--------------------------------------------------------------------------------
1 | name: Docs Cleanup CI
2 |
3 | # delete branch documentation when a branch is deleted
4 | # also allow manually deleting a documentation version
5 | on:
6 | delete:
7 | workflow_dispatch:
8 | inputs:
9 | version:
10 | description: "documentation version to DELETE"
11 | required: true
12 | type: string
13 |
14 | jobs:
15 | remove:
16 | if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch'
17 | runs-on: ubuntu-latest
18 |
19 | steps:
20 | - name: Checkout
21 | uses: actions/checkout@v4
22 | with:
23 | ref: gh-pages
24 |
25 | - name: removing documentation for branch ${{ github.event.ref }}
26 | if: ${{ github.event_name != 'workflow_dispatch' }}
27 | run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV
28 |
29 | - name: manually removing documentation version ${{ github.event.inputs.version }}
30 | if: ${{ github.event_name == 'workflow_dispatch' }}
31 | run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV
32 |
33 | - name: Sanitize ref name for docs version
34 | run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV
35 |
36 | - name: update index and push changes
37 | run: |
38 | rm -r $DOCS_VERSION
39 | python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json
40 | git config --global user.name 'GitHub Actions Docs Cleanup CI'
41 | git config --global user.email 'GithubActionsCleanup@noreply.github.com'
42 | git commit -am "Removing redundant docs version $DOCS_VERSION"
43 | git push
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | .venv/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | **/_version.py
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | cov.xml
47 | .pytest_cache/
48 | .mypy_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 |
57 | # Sphinx documentation
58 | docs/_build/
59 |
60 | # PyBuilder
61 | target/
62 |
63 | # likely venv names
64 | .venv*
65 | venv*
66 |
67 | # further build artifacts
68 | lockfiles/
69 |
70 | # ruff cache
71 | .ruff_cache/
72 |
--------------------------------------------------------------------------------
/.gitremotes:
--------------------------------------------------------------------------------
1 | github git@github.com:gilesknap/gphotos-sync.git
2 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.4.0
4 | hooks:
5 | - id: check-added-large-files
6 | - id: check-yaml
7 | - id: check-merge-conflict
8 |
9 | - repo: local
10 | hooks:
11 | - id: black
12 | name: Run black
13 | stages: [commit]
14 | language: system
15 | entry: black --check --diff
16 | types: [python]
17 |
18 | - id: ruff
19 | name: Run ruff
20 | stages: [commit]
21 | language: system
22 | entry: ruff check
23 | types: [python]
24 |
25 | - repo: https://github.com/codespell-project/codespell
26 | # Configuration for codespell is in pyproject.toml
27 | rev: v2.2.6
28 | hooks:
29 | - id: codespell
30 | additional_dependencies:
31 | - tomli
32 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "ms-vscode-remote.remote-containers",
4 | "ms-python.python",
5 | "tamasfe.even-better-toml",
6 | "redhat.vscode-yaml",
7 | "ryanluker.vscode-coverage-gutters",
8 | "charliermarsh.Ruff"
9 | ]
10 | }
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Debug Unit Test",
9 | "type": "python",
10 | "request": "launch",
11 | "justMyCode": false,
12 | "program": "${file}",
13 | "purpose": [
14 | "debug-test"
15 | ],
16 | "console": "integratedTerminal",
17 | "env": {
18 | // The default config in pyproject.toml's "[tool.pytest.ini_options]" adds coverage.
19 | // Cannot have coverage and debugging at the same time.
20 | // https://github.com/microsoft/vscode-python/issues/693
21 | "PYTEST_ADDOPTS": "--no-cov"
22 | },
23 | }
24 | ]
25 | }
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.testing.pytestArgs": [
3 |         "--cov=gphotos_sync",
4 | "--cov-report",
5 | "xml:cov.xml"
6 | ],
7 | "python.testing.unittestEnabled": false,
8 | "python.testing.pytestEnabled": true,
9 | "editor.formatOnSave": true,
10 | "[python]": {
11 | "editor.codeActionsOnSave": {
12 | "source.fixAll.ruff": "never",
13 | "source.organizeImports.ruff": "explicit"
14 | }
15 | }
16 | }
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | // See https://go.microsoft.com/fwlink/?LinkId=733558
2 | // for the documentation about the tasks.json format
3 | {
4 | "version": "2.0.0",
5 | "tasks": [
6 | {
7 | "type": "shell",
8 | "label": "Tests, lint and docs",
9 | "command": "tox -p",
10 | "options": {
11 | "cwd": "${workspaceRoot}"
12 | },
13 | "problemMatcher": [],
14 | }
15 | ]
16 | }
17 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | David Grant
2 | Giles Knap
3 | The Codacy Badger
4 | Yann Rouillard
5 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | Be nice and take it easy.
4 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Contributions and issues are most welcome! All issues and pull requests are
5 | handled through GitHub_. Also, please check for any existing issues before
6 | filing a new one. If you have a great idea but it involves big changes, please
7 | file a ticket before making a pull request! We want to make sure you don't spend
8 | your time coding something that might not fit the scope of the project.
9 |
10 | .. _GitHub: https://github.com/gilesknap/gphotos-sync/issues
11 |
12 | Running the tests
13 | -----------------
14 |
15 | To run in a container
16 | ~~~~~~~~~~~~~~~~~~~~~
17 |
18 | Use vscode devcontainer as follows::
19 |
20 | $ git clone git://github.com/gilesknap/gphotos-sync.git
21 | $ vscode gphotos-sync
22 | Click on 'Reopen in Container' when prompted
23 | In a vscode Terminal:
24 | $ tox -p
25 |
26 |
27 | To run locally
28 | ~~~~~~~~~~~~~~
29 |
30 | Get the source code and run the unit tests directly
31 | on your workstation as follows::
32 |
33 | $ git clone git://github.com/gilesknap/gphotos-sync.git
34 | $ cd gphotos-sync
35 | $ virtualenv .venv
36 | $ source .venv/bin/activate
37 | $ pip install -e .[dev]
38 | $ tox -p
39 |
40 | In both cases tox -p runs in parallel the following checks:
41 |
42 | - Build Sphinx Documentation
43 | - run pytest on all tests in ./tests
44 | - run mypy linting on all files in ./src ./tests
45 | - run pre-commit checks:
46 |
47 | - run flake8 style checks against all source
48 | - run black formatting checks against all source
49 |
50 | While 100% code coverage does not make a library bug-free, it significantly
51 | reduces the number of easily caught bugs! Please make sure coverage remains the
52 | same or is improved by a pull request!
53 |
54 | Code Styling
55 | ------------
56 |
57 | The code in this repository conforms to standards set by the following tools:
58 |
59 | - black_ for code formatting
60 | - flake8_ for style checks
61 | - isort_ for import ordering
62 | - mypy_ for static type checking
63 |
64 | These checks will be run by pre-commit_. You can either choose to run these
65 | tests on all files tracked by git::
66 |
67 | $ tox -e pre-commit,mypy
68 |
69 | Or you can install a pre-commit hook that will run each time you do a ``git
70 | commit`` on just the files that have changed::
71 |
72 | $ pre-commit install
73 |
74 | .. _black: https://github.com/psf/black
75 | .. _flake8: https://flake8.pycqa.org/en/latest/
76 | .. _isort: https://github.com/PyCQA/isort
77 | .. _mypy: https://github.com/python/mypy
78 | .. _pre-commit: https://pre-commit.com/
79 |
80 | Docstrings are pre-processed using the Sphinx Napoleon extension. As such,
81 | google-style_ is considered as standard for this repository. Please use type
82 | hints in the function signature for types. For example::
83 |
84 | def func(arg1: str, arg2: int) -> bool:
85 | """Summary line.
86 |
87 | Extended description of function.
88 |
89 | Args:
90 | arg1: Description of arg1
91 | arg2: Description of arg2
92 |
93 | Returns:
94 | Description of return value
95 | """
96 | return True
97 |
98 | .. _google-style: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/index.html#google-vs-numpy
99 |
100 | Documentation
101 | -------------
102 |
103 | Documentation is contained in the ``docs`` directory and extracted from
104 | docstrings of the API.
105 |
106 | Docs follow the underlining convention::
107 |
108 | Heading 1 (page title)
109 | =======================
110 |
111 | Heading 2
112 | ---------
113 |
114 | Heading 3
115 | ~~~~~~~~~
116 |
117 | You can build the docs from the project directory by running::
118 |
119 | $ tox -e docs
120 | $ firefox build/html/index.html
121 |
122 | Release Process
123 | ---------------
124 |
125 | To make a new release, please follow this checklist:
126 |
127 | - Choose a new PEP440 compliant release number
128 | - Git tag the version
129 | - Push to GitHub and the actions will make a release on pypi
130 | - Push to internal gitlab and do a dls-release.py of the tag
131 | - Check and edit for clarity the autogenerated GitHub release_
132 |
133 | .. _release: https://github.com/gilesknap/gphotos-sync/releases
134 |
135 | Updating the tools
136 | ------------------
137 |
138 | This module is merged with the dls-python3-skeleton_. This is a generic
139 | Python project structure which provides a means to keep tools and
140 | techniques in sync between multiple Python projects. To update to the
141 | latest version of the skeleton, run::
142 |
143 | $ git pull https://github.com/dls-controls/dls-python3-skeleton skeleton
144 |
145 | Any merge conflicts will indicate an area where something has changed that
146 | conflicts with the setup of the current module. Check the closed pull
147 | requests of the skeleton module for more details.
148 |
149 |
150 | .. _dls-python3-skeleton: https://dls-controls.github.io/dls-python3-skeleton
151 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # This file is for use as a devcontainer and a runtime container
2 | #
3 | # The devcontainer should use the build target and run as root with podman
4 | # or docker with user namespaces.
5 | #
6 | FROM python:3.11 as build
7 |
8 | ARG PIP_OPTIONS=.
9 |
10 | # Add any system dependencies for the developer/build environment here e.g.
11 | # RUN apt-get update && apt-get upgrade -y && \
12 | # apt-get install -y --no-install-recommends \
13 | # desired-packages \
14 | # && rm -rf /var/lib/apt/lists/*
15 |
16 | # set up a virtual environment and put it in PATH
17 | RUN python -m venv /venv
18 | ENV PATH=/venv/bin:$PATH
19 |
20 | # Copy any required context for the pip install over
21 | COPY . /context
22 | WORKDIR /context
23 |
24 | # install python package into /venv
25 | RUN pip install ${PIP_OPTIONS}
26 |
27 | FROM python:3.11-slim as runtime
28 |
29 | # Add apt-get system dependencies for runtime here if needed
30 |
31 | # copy the virtual environment from the build stage and put it in PATH
32 | COPY --from=build /venv/ /venv/
33 | ENV PATH=/venv/bin:$PATH
34 |
35 | # change this entrypoint if it is not the same as the repo
36 | ENTRYPOINT ["gphotos-sync"]
37 | CMD ["--version"]
38 |
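39 | # A minimal sketch of building and running this image locally (the image tag
40 | # is hypothetical):
41 | #
42 | #   docker build --target runtime -t gphotos-sync .
43 | #   docker run --rm gphotos-sync --version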
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | So long and thanks for all the contributions!
2 | =============================================
3 |
4 | This project was archived on Oct 4th 2024
5 | -----------------------------------------
6 | For the details behind this decision please see https://github.com/gilesknap/gphotos-sync-discussion/discussions/1
7 |
8 | ---------------------------------------------------------------
9 | .
10 | .
11 | .
12 | .
13 | .
14 | .
15 | .
16 | .
17 | .
18 | .
19 | .
20 | .
21 | .
22 | .
23 | .
24 | .
25 | .
26 | .
27 | .
28 | .
29 | .
30 | .
31 | .
32 | .
33 | .
34 | .
35 | .
36 | .
37 | .
38 | .
39 | .
40 | .
41 | .
42 | .
43 | .
44 | .
45 | .
46 | .
47 | .
48 | .
49 | .
50 | .
51 | .
52 | .
53 | .
54 | .
55 | .
56 | .
57 | .
58 | .
59 | .
60 | .
61 | .
62 | .
63 | .
64 | ---------------------------------------------------------------
65 |
66 |
67 | Google Photos Sync (gphotos-sync)
68 | =================================
69 |
70 | |code_ci| |docs_ci| |coverage| |pypi_version| |license|
71 |
72 | Google Photos Sync is a backup tool for your Google Photos cloud storage.
73 |
74 | ============== ==============================================================
75 | PyPI ``pip install gphotos-sync``
76 | Source code https://github.com/gilesknap/gphotos-sync
77 | Documentation https://gilesknap.github.io/gphotos-sync
78 | Releases https://github.com/gilesknap/gphotos-sync/releases
79 | ============== ==============================================================
80 |
81 | Intro
82 | =====
83 | Google Photos Sync downloads all photos and videos the user has uploaded to
84 | Google Photos. It also organizes the media in the local file system using
85 | album information. Additional Google Photos 'Creations' such as
86 | animations, panoramas, movies, effects and collages are also backed up.
87 |
88 | This software is read only and never modifies your cloud library in any way,
89 | so there is no risk of damaging your data.
90 |
91 | WARNING: Index DB Schema Change
92 | ===============================
93 |
94 | 3.2.4 introduces a change to the index database schema in order to distinguish between
95 | private albums and shared albums. You will be required to rebuild your index
96 | database when upgrading to this version or later.
97 |
98 | Please use the command line option ``--flush-index`` to rebuild the index database.
99 |
100 | This need only be done for the first invocation of the new version.
101 |
102 | If you have not done so you will see the error message:
103 |
104 | ``sqlite3.OperationalError: no such column: IsSharedAlbum``
105 |
106 |
107 | Warning: Google API Issues
108 | ==========================
109 |
110 | There are a number of long standing issues with the Google Photos API that mean it is not possible
111 | to make a true backup of your media. In particular:
112 |
113 | - Videos are transcoded to lower quality
114 | - Raw or Original photos are converted to 'High Quality'
115 | - GPS info is removed from photos metadata
116 |
117 | For details of the Bugs reported to Google see https://github.com/gilesknap/gphotos-sync/issues/119.
118 |
119 | To join in a discussion on this issue see https://github.com/gilesknap/gphotos-sync/discussions/347.
120 |
121 |
122 | Quick Start
123 | ===========
124 |
125 | To get started see the Tutorial in the documentation: https://gilesknap.github.io/gphotos-sync
126 |
127 |
128 | .. |code_ci| image:: https://github.com/gilesknap/gphotos-sync/workflows/Code%20CI/badge.svg?branch=main
129 | :target: https://github.com/gilesknap/gphotos-sync/actions?query=workflow%3A%22Code+CI%22
130 | :alt: Code CI
131 |
132 | .. |docs_ci| image:: https://github.com/gilesknap/gphotos-sync/workflows/Docs%20CI/badge.svg?branch=main
133 | :target: https://github.com/gilesknap/gphotos-sync/actions?query=workflow%3A%22Docs+CI%22
134 | :alt: Docs CI
135 |
136 | .. |coverage| image:: https://codecov.io/gh/gilesknap/gphotos-sync/branch/main/graph/badge.svg
137 | :target: https://codecov.io/gh/gilesknap/gphotos-sync
138 | :alt: Test Coverage
139 |
140 | .. |pypi_version| image:: https://img.shields.io/pypi/v/gphotos-sync.svg
141 | :target: https://pypi.org/project/gphotos-sync
142 | :alt: Latest PyPI version
143 |
144 | .. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg
145 | :target: https://opensource.org/licenses/Apache-2.0
146 | :alt: Apache License
147 |
148 | ..
149 | Anything below this line is used when viewing README.rst and will be replaced
150 | when included in index.rst
151 |
152 | See https://gilesknap.github.io/gphotos-sync for more detailed documentation.
153 |
154 | Alternatives
155 | ============
156 |
157 | For a commercial product that backs up your library to the cloud, see
158 | https://photovaultone.com/
159 |
160 |
--------------------------------------------------------------------------------
/docs/.notes:
--------------------------------------------------------------------------------
1 | System Tests
2 | ============
3 |
4 | The system tests use a real Google Account with its own Photos Library.
5 |
6 | This account is called gphotos.sync.test@gmail.com
7 |
8 | Its password uses giles' password scheme C
9 |
10 | (I'd like this to be in public domain for use by other contributors but I
11 | don't believe there is a secure way to do this.)
--------------------------------------------------------------------------------
/docs/_static/theme_overrides.css:
--------------------------------------------------------------------------------
1 | /* override table width restrictions */
2 | @media screen and (min-width: 639px) {
3 | .wy-table-responsive table td {
4 | /* !important prevents the common CSS stylesheets from
5 | overriding this as on RTD they are loaded after this stylesheet */
6 | white-space: normal !important;
7 | }
8 | }
9 |
10 | /* override table padding */
11 | .rst-content table.docutils th, .rst-content table.docutils td {
12 | padding: 4px 6px;
13 | }
14 |
15 | /* Add two-column option */
16 | @media only screen and (min-width: 1000px) {
17 | .columns {
18 | padding-left: 10px;
19 | padding-right: 10px;
20 | float: left;
21 | width: 50%;
22 | min-height: 145px;
23 | }
24 | }
25 |
26 | .endcolumns {
27 | clear: both
28 | }
29 |
30 | /* Hide toctrees within columns and captions from all toctrees.
31 | This is what makes the include trick in index.rst work */
32 | .columns .toctree-wrapper, .toctree-wrapper .caption-text {
33 | display: none;
34 | }
35 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/main/usage/configuration.html
6 |
7 | import gphotos_sync
8 |
9 | # -- General configuration ------------------------------------------------
10 |
11 | # General information about the project.
12 | project = "gphotos-sync"
13 |
14 | # The full version, including alpha/beta/rc tags.
15 | release = gphotos_sync.__version__
16 |
17 | # The short X.Y version.
18 | if "+" in release:
19 | # Not on a tag
20 | version = "main"
21 | else:
22 | version = release
23 |
24 | extensions = [
25 | # Use this for generating API docs
26 | "sphinx.ext.autodoc",
27 | # This can parse google style docstrings
28 | "sphinx.ext.napoleon",
29 | # For linking to external sphinx documentation
30 | "sphinx.ext.intersphinx",
31 | # Add links to source code in API docs
32 | "sphinx.ext.viewcode",
33 | # Adds the inheritance-diagram generation directive
34 | "sphinx.ext.inheritance_diagram",
35 | ]
36 |
37 | # If true, Sphinx will warn about all references where the target cannot
38 | # be found.
39 | nitpicky = False
40 |
41 | # A list of (type, target) tuples (by default empty) that should be ignored when
42 | # generating warnings in "nitpicky mode". Note that type should include the
43 | # domain name if present. Example entries would be ('py:func', 'int') or
44 | # ('envvar', 'LD_LIBRARY_PATH').
45 | nitpick_ignore = [("py:func", "int"), ("py:class", "int")]
46 |
47 | # Both the class’ and the __init__ method’s docstring are concatenated and
48 | # inserted into the main body of the autoclass directive
49 | autoclass_content = "both"
50 |
51 | # Order the members by the order they appear in the source code
52 | autodoc_member_order = "bysource"
53 |
54 | # Don't inherit docstrings from baseclasses
55 | autodoc_inherit_docstrings = False
56 |
57 | # Output graphviz directive produced images in a scalable format
58 | graphviz_output_format = "svg"
59 |
60 | # The name of a reST role (builtin or Sphinx extension) to use as the default
61 | # role, that is, for text marked up `like this`
62 | default_role = "any"
63 |
64 | # The suffix of source filenames.
65 | source_suffix = ".rst"
66 |
67 | # The main toctree document.
68 | master_doc = "index"
69 |
70 | # List of patterns, relative to source directory, that match files and
71 | # directories to ignore when looking for source files.
72 | # These patterns also affect html_static_path and html_extra_path
73 | exclude_patterns = ["_build"]
74 |
75 | # The name of the Pygments (syntax highlighting) style to use.
76 | pygments_style = "sphinx"
77 |
78 | # This means you can link things like `str` and `asyncio` to the relevant
79 | # docs in the python documentation.
80 | intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)}
81 |
82 | # A dictionary of graphviz graph attributes for inheritance diagrams.
83 | inheritance_graph_attrs = {"rankdir": "TB"}
84 |
85 | # Common links that should be available on every page
86 | rst_epilog = """
87 | .. _Diamond Light Source:
88 | http://www.diamond.ac.uk
89 | """
90 |
91 | # Ignore localhost links for the periodic check that links in docs are valid
92 | linkcheck_ignore = [r"http://localhost:\d+/"]
93 |
94 | # -- Options for HTML output -------------------------------------------------
95 |
96 | # The theme to use for HTML and HTML Help pages. See the documentation for
97 | # a list of builtin themes.
98 | #
99 | html_theme = "sphinx_rtd_theme_github_versions"
100 |
101 | # Options for the sphinx rtd theme, use DLS blue
102 | html_theme_options = {"style_nav_header_background": "rgb(7, 43, 93)"}
103 |
104 | # Add any paths that contain custom static files (such as style sheets) here,
105 | # relative to this directory. They are copied after the builtin static files,
106 | # so a file named "default.css" will overwrite the builtin "default.css".
107 | html_static_path = ["_static"]
108 |
109 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
110 | html_show_sphinx = False
111 |
112 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
113 | html_show_copyright = False
114 |
115 | # Add some CSS classes for columns and other tweaks in a custom css file
116 | html_css_files = ["theme_overrides.css"]
117 |
118 | # Logo
119 | html_logo = "images/logo.png"
120 | html_favicon = "images/logo.png"
121 |
--------------------------------------------------------------------------------
/docs/explanations.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | Explanations
4 | ============
5 |
6 | Explanation of how the library works and why it works that way.
7 |
8 | .. toctree::
9 | :caption: Explanations
10 |
11 | explanations/folders
12 | explanations/notes
13 | explanations/tokens
14 | explanations/googleapiissues
15 |
--------------------------------------------------------------------------------
/docs/explanations/folders.rst:
--------------------------------------------------------------------------------
1 | .. _Folders:
2 |
3 | Folder Layout
4 | =============
5 |
6 | After doing a full sync you will have two directories under the specified root:
7 |
8 | Media Files
9 | -----------
10 |
11 | photos:
12 | contains all photos and videos from your Google Photos Library organized
13 | into folders with the structure 'photos/YYYY/MM' where 'YYYY/MM' is
14 | the date the photo/video was taken. The filenames within a folder will
15 | be as per the original upload except that duplicate names will have a
16 | suffix ' (n)' where n is the duplicate number of the file (this matches
17 | the approach used in the official Google tool for Windows).
18 |
19 | albums:
20 | contains a folder hierarchy representing the set of albums and shared
21 | albums in your library. All the files are symlinks to content in the photos
22 | folder. The folder names will be 'albums/YYYY/MM Original Album Name'.
23 |
24 | Note that these are the default layouts and you can change what is downloaded
25 | and how it is laid out with command line options. See the help for details::
26 |
27 | gphotos-sync --help
28 |
29 | Other Files
30 | -----------
31 |
32 | The following files will also appear in the root folder:
33 |
34 | - gphotos.sqlite: the database that tracks what files have been indexed,
35 | you can open this with sqlite3 to examine what media and metadata you have.
36 | - gphotos.log: a log of the most recent run, including debug info
37 | - gphotos.lock: the lock file used to make sure only one gphotos-sync runs
38 | at a time
39 | - gphotos.trace: a trace file if logging is set to trace level. This logs
40 | the calls to the Google Photos API.
41 | - gphotos.bad_ids.yaml: A list of bad entries that cause the API to get
42 | an error when downloading. Delete this file to retry downloading these
43 | bad items.
44 |
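45 | Putting the above together, a backup root might contain entries like these
46 | after a full sync (file and album names are hypothetical)::
47 |
48 |     photos/2017/09/IMG_20170913_173947.jpg
49 |     albums/2017/09 Holiday/IMG_20170913_173947.jpg   (symlink into photos/)
50 |     gphotos.sqlite
51 |     gphotos.log
52 |     gphotos.lock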
--------------------------------------------------------------------------------
/docs/explanations/googleapiissues.rst:
--------------------------------------------------------------------------------
1 | Known Issues
2 | ============
3 |
4 |
5 | Known Issues with Google API
6 | ----------------------------
7 | A few outstanding limitations of the Google API restrict what can be achieved.
8 | All these issues have been reported to Google and this project will be updated
9 | once they are resolved.
10 |
11 | Unfortunately, a number of quite significant issues have remained unfixed for
12 | several years. I'm starting a discussion group in this repo for people to
13 | discuss workarounds, and collect reports of how these things are affecting
14 | users. I intend to link this discussion group in the Google issue trackers
15 | to see if it will encourage some response from Google.
16 |
17 | To join the discussion go here
18 | https://github.com/gilesknap/gphotos-sync/discussions.
19 |
20 | Pending API Issues
21 | ~~~~~~~~~~~~~~~~~~
22 |
23 | - There is no way to discover modified date of library media items. Currently
24 | ``gphotos-sync`` will refresh your local copy with any new photos added since
25 | the last scan but will not update any photos that have been modified in
26 | Google Photos.
27 |
28 | - https://issuetracker.google.com/issues/122737849.
29 |
30 |
31 | - GOOGLE WON'T FIX. The API strips GPS data from images.
32 |
33 | - https://issuetracker.google.com/issues/80379228.
34 |
35 | - Video download transcodes the videos even if you ask for the original file
36 |   (=vd parameter). My experience is that the result looks similar to the original
37 | but the compression is more clearly visible. It is a smaller file with
38 | approximately 60% bitrate (same resolution).
39 |
40 | - https://issuetracker.google.com/issues/80149160
41 |
42 | - Photo download compresses the photos even if you ask for the original file
43 | (=d parameter). This is similar to the above issue, except in my experience
44 |   it is nearly impossible to notice a loss in quality. It
45 | is a file compressed to approximately 60% of the original size (same resolution).
46 |
47 | - https://issuetracker.google.com/issues/112096115
48 |
49 | - Burst shots are not supported. You will only see the first file of a burst shot.
50 |
51 | - https://issuetracker.google.com/issues/124656564
52 |
53 | Fixed API Issues
54 | ~~~~~~~~~~~~~~~~
55 | - FIXED BY GOOGLE. Some types of video will not download using the new API.
56 |
57 | - https://issuetracker.google.com/issues/116842164.
58 | - https://issuetracker.google.com/issues/141255600
59 |
60 | Other Issues
61 | ------------
62 | - Some mounted filesystems including NFS, CIFS and AFP do not support file locks
63 | and database access will fail on them.
64 |
65 | - To fix, use the parameter --db-path to specify a location for your DB on
66 | the local disk. This will perform better anyway.
67 |
68 | GPS workaround
69 | --------------
70 | For a workaround to the GPS issue described above, see this project:
71 | https://github.com/DennyWeinberg/manager-for-google-photos
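72 |
73 | As an illustration of the ``--db-path`` workaround described under Other
74 | Issues above (both paths are hypothetical)::
75 |
76 |     gphotos-sync --db-path ~/gphotos-db /mnt/nfs/photo-backup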
--------------------------------------------------------------------------------
/docs/explanations/notes.rst:
--------------------------------------------------------------------------------
1 | Why is Google Photos Sync Read Only
2 | ===================================
3 |
4 | Google Photos Sync is a backup tool only. It never makes any changes to your
5 | Google Photos Library in the cloud. There are two primary reasons for this:
6 |
7 | - The Photos API provided by Google is far too restricted to make changes
8 | in any meaningful way. For example
9 |
10 | - there is no delete function
11 | - you cannot add photos to an album unless it was created by the same
12 | application that is trying to add photos
13 |
14 | - Even if the API allowed it, this would be a very hard problem, because
15 | it is often hard to identify if a local photo or video matches one in the
16 | cloud. Besides this, I would not want the responsibility of potentially
17 | trashing someone's photo collection.
18 |
--------------------------------------------------------------------------------
/docs/explanations/tokens.rst:
--------------------------------------------------------------------------------
1 | .. _Tokens:
2 |
3 | Google OAuth Tokens for gphotos-sync
4 | ====================================
5 |
6 | Introduction
7 | ------------
8 |
9 | There are two kinds of authentication required to run gphotos-sync.
10 |
11 | - First the application must authenticate with Google to authorize the use
12 | of the Google Photos API. This gives it permission to perform the
13 | second authentication step.
14 | - Second, an individual User Account must be authenticated to allow access
15 | to that user's Google Photos Library.
16 |
17 | The secret information that enables these authentication steps is held in
18 | two files:
19 |
20 | - client_secret.json holds the OAuth application ID that allows the
21 | first step. This is stored in an application configuration folder.
22 | There is only one of these files per installation of gphotos-sync.
23 | See `Client ID` for details of creating this file.
24 | - .gphotos.token holds the user token for each user you are backing up
25 | photos for. This resides in the root folder of the library backup.
26 | See `Login` for details of creating this file.
27 |
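   | On a Linux installation, for example, the two files end up laid out like this
   | (the config path varies by OS; see the installation tutorial)::
   |
   |     ~/.config/gphotos-sync/client_secret.json   # one per installation
   |     <backup root folder>/.gphotos.token         # one per user being backed up
   |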
28 | Why Do We Need a Client ID?
29 | ---------------------------
30 |
31 | The expected use of the client ID is that a vendor provides a single ID
32 | for their application, Google verifies the application and then anyone
33 | can use it.
34 |
35 | In this scenario ALL Google API calls would count against the vendor's
36 | account. They would be charged for use of those APIs and they would
37 | need to charge their users to make this worthwhile.
38 |
39 | If I were to provide my own client ID with gphotos-sync then I would need
40 | to charge a subscription to cover the API costs.
41 |
42 | Since this is FOSS I ask every user to create their own client ID
43 | so they can take advantage of the free tier of Google API use that is
44 | available to every user.
45 |
46 | Most normal use of gphotos-sync does not exceed the free tier, and even if it
47 | does you will not be charged. The code is supposed to throttle back and go
48 | slower to drop back into the free usage rate. However, there is an issue with
49 | this feature at present and you will likely see an error:
50 |
51 | ``429 Client Error: Too Many Requests for url``.
52 |
53 | See https://github.com/gilesknap/gphotos-sync/issues/320,
54 | https://github.com/gilesknap/gphotos-sync/issues/202 for details and
55 | workarounds.
56 |
--------------------------------------------------------------------------------
/docs/how-to.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | How-to Guides
4 | =============
5 |
6 | Practical step-by-step guides for the more experienced user.
7 |
8 | .. toctree::
9 | :caption: How-to Guides
10 |
11 | how-to/windows
12 | how-to/cron
13 | how-to/comparison
14 |
--------------------------------------------------------------------------------
/docs/how-to/comparison.rst:
--------------------------------------------------------------------------------
1 | Comparing The Google Photos Library With Local files
2 | ====================================================
3 |
4 | .. warning::
5 | This feature is deprecated. Working out whether files in the filesystem match
6 | those in the Google Library is more of an art than a science. I used this
7 | feature to prove that gphotos-sync had worked on my library when I fully
8 | committed to Google Photos in 2015 and it has not been touched since. It uses
9 | complicated SQL functions to do the comparison and is probably not working
10 | anymore.
11 |
12 | I'm leaving it enabled in case anyone wants to have a go or if any
13 | contributors want to resurrect it. But I'm not supporting this feature
14 | anymore.
15 |
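   | A typical invocation (with hypothetical paths) looks like::
   |
   |     gphotos-sync /backups/alice --compare-folder /mnt/old-backup
   |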
16 | There will be additional folders created when using the --compare-folder option.
17 |
18 | The option is used to compare the contents of your library with a local
19 | folder such as a previous backup. The comparison does not require that the
20 | files are arranged in the same folders; it uses metadata in the files, such as
21 | create date and exif UID, to match pairs of items. The additional folders
   | created by a comparison will be:
22 |
23 | * **comparison** a new folder off of the specified root containing the following:
24 |
25 | * **missing_files** - contains symlinks to the files in the comparison folder that were not found in the Google
26 | Photos Library. The folder structure is the same as that in the comparison folder. These are the
27 | files that you would upload to Google Photos via the Web interface to restore from backup.
28 |
29 | * **extra_files** - contains symlinks to the files in the photos folder which appear in the Library but not in the
30 | comparison folder. The folder structure is the same as the photos folder.
31 |
32 | * **duplicates** - contains symlinks to any duplicate files found in the comparison folder. This is a flat structure
33 | and the symlink filenames have a numeric prefix to make them unique and group the duplicates together.
34 |
35 | NOTES:
36 |
37 | * the comparison code uses an external tool 'ffprobe'. It will run without it, but will not be able to
38 | extract metadata from video files and will revert to relying on Google Photos metadata and file modified date (this is
39 | a much less reliable way to match video files, but the results should be OK if the backup folder
40 | was originally created using gphotos-sync).
41 | * If you have shared albums and have clicked 'add to library' on items from others' libraries then you will have two
42 | copies of those items and they will show as duplicates too.
43 |
--------------------------------------------------------------------------------
/docs/how-to/cron.rst:
--------------------------------------------------------------------------------
1 | Scheduling a Regular Backup
2 | ---------------------------
3 | On Linux you can add gphotos-sync to your cron schedule easily. See https://crontab.guru/
4 | for tips on how to configure regular execution of a command. You will need a script that
5 | looks something like this::
6 |
7 | #!/bin/bash
8 | /bin/python gphotos-sync "$@" --logfile /tmp >> /tmp/gphotos_full.log 2>&1
9 |
10 | gphotos-sync uses a lockfile so that if a cron job starts while a previous one
11 | is still running then the 2nd instance will abort.
12 |
13 | Note that cron does not have access to your profile so none of the usual
14 | environment variables are available.
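   |
   | For example, assuming the script above is saved as ``/home/me/gphotos-backup.sh``
   | (a hypothetical path), a crontab entry to run a backup every night at 2am
   | would be::
   |
   |     0 2 * * * /home/me/gphotos-backup.sh /home/me/My_photos_backup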
--------------------------------------------------------------------------------
/docs/how-to/windows.rst:
--------------------------------------------------------------------------------
1 | .. _Windows:
2 |
3 | Additional Setup for Windows Machines
4 | =====================================
5 |
6 | Python
7 | ------
8 |
9 | To install python on Windows:
10 |
11 | - Open a command prompt by pressing the Windows key and typing "cmd"
12 | - Type 'python'
13 | - This will take you to the Microsoft Store and prompt you to install python
14 | - When complete, return to the command prompt
15 | - Type 'pip install gphotos-sync'
16 |
17 | You can now run using the following, but replacing <username> with your
18 | username and <VERSION> with the python version installed (look in the
19 | Packages folder to find the full VERSION):
20 |
21 | ``C:\Users\<username>\AppData\Local\Packages\PythonSoftwareFoundation.Python.<VERSION>\LocalCache\local-packages\Python310\Scripts\gphotos-sync.exe``
22 |
23 | As an alternative to typing the full path you can add the Scripts folder
24 | to your path. See
25 | https://www.architectryan.com/2018/03/17/add-to-the-path-on-windows-10/.
26 |
27 | Using the installer downloadable from https://www.python.org/downloads/ will have
28 | the same effect and includes a checkbox to add python to your Windows Path.
29 |
30 | Virtual Environment
31 | -------------------
32 | It is recommended you create a virtual environment to run your python code in,
33 | to avoid messing up your root python install. In the example below we create a
34 | virtual environment on the desktop and assume that python has been added to
35 | your Windows path variable as above.
36 |
37 | - Create a new folder on your desktop called 'GPhotosSync'
38 | - Hold shift and right click on your desktop and click 'Open PowerShell window here'
39 | - Type ``python -m venv GPhotosSync``; this will create a virtual environment
40 | - Next activate the environment using the command ``.\GPhotosSync\Scripts\activate.ps1``
41 | - You can then install gphotos-sync using the command ``pip install gphotos-sync``
42 | - You run it the same way as listed above, but now you need to activate the
   | virtual environment every time you run it (a full example session is shown below).
43 |
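   | Taken together, a typical PowerShell session (with a hypothetical user ``me``
   | and target folder) looks like::
   |
   |     PS C:\Users\me\Desktop> python -m venv GPhotosSync
   |     PS C:\Users\me\Desktop> .\GPhotosSync\Scripts\activate.ps1
   |     (GPhotosSync) PS C:\Users\me\Desktop> pip install gphotos-sync
   |     (GPhotosSync) PS C:\Users\me\Desktop> gphotos-sync C:\Backups\GooglePhotos
   |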
44 | Symlinks
45 | --------
46 |
47 | Album information is created as a set of folders with symbolic links into
48 | the photos folder. Windows supports symbolic links but it is turned off by default.
49 | You can either turn it on for your account or you can use the operation
50 | ``--skip-albums``.
51 |
52 | To enable symbolic links permission for the account that gphotos-sync
53 | will run under, see `Enabling SymLinks on Windows`_.
54 |
55 | .. _`Enabling SymLinks on Windows`: https://community.perforce.com/s/article/3472
56 |
57 | Alternative approach
58 | --------------------
59 | To avoid fiddling with symlinks and python paths you could try WSL2.
60 |
61 | This project was developed in Linux, so if you would like to get the
62 | native experience I recommend installing WSL2 and Ubuntu.
63 | This gives you a linux environment inside of your Windows OS and
64 | handles command line installation of python and python applications
65 | in a far cleaner way.
66 |
67 | The integration
68 | is particularly good on Windows 11.
69 | See https://docs.microsoft.com/en-us/windows/wsl/install.
70 |
71 | .. _WindowsDocker:
72 |
73 | Initial Setup on Windows for Docker desktop
74 | ===========================================
75 |
76 | If you want to run the app in a container then there are some additional
77 | steps required on Windows.
78 |
79 | First you need to have installed Docker Desktop from
80 | https://www.docker.com/products/docker-desktop/
81 |
82 | - make sure to leave 'use WSL2 instead of Hyper-V' ticked
83 | - if you already have docker installed with Hyper-V, consider re-installing with
84 | WSL2
85 |
86 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/images/logo.png
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 | :end-before: when included in index.rst
3 |
4 |
5 | How the documentation is structured
6 | -----------------------------------
7 |
8 | Documentation is split into four categories, also accessible from links in the
9 | side-bar.
10 |
11 | .. rst-class:: columns
12 |
13 | `tutorials`
14 | ~~~~~~~~~~~
15 |
16 | .. include:: tutorials.rst
17 | :start-after: =========
18 |
19 | .. rst-class:: columns
20 |
21 | `how-to`
22 | ~~~~~~~~
23 |
24 | .. include:: how-to.rst
25 | :start-after: =============
26 |
27 | .. rst-class:: columns
28 |
29 | `explanations`
30 | ~~~~~~~~~~~~~~
31 |
32 | .. include:: explanations.rst
33 | :start-after: ============
34 |
35 | .. rst-class:: columns
36 |
37 | `reference`
38 | ~~~~~~~~~~~
39 |
40 | .. include:: reference.rst
41 | :start-after: =========
42 |
43 | .. rst-class:: endcolumns
44 |
45 | About the documentation
46 | ~~~~~~~~~~~~~~~~~~~~~~~
47 |
48 | `Why is the documentation structured this way? <https://documentation.divio.com>`_
49 |
--------------------------------------------------------------------------------
/docs/reference.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | Reference
4 | =========
5 |
6 | Detailed information including autogenerated code documentation.
7 |
8 | .. toctree::
9 | :caption: Reference
10 |
11 | reference/contributing
12 | reference/api
13 | Releases <https://github.com/gilesknap/gphotos-sync/releases>
14 |
--------------------------------------------------------------------------------
/docs/reference/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | .. automodule:: gphotos_sync
5 |
6 | ``gphotos_sync``
7 | -----------------------------------
8 |
9 | This is the internal code reference for gphotos_sync
10 |
11 | .. data:: gphotos_sync.__version__
12 | :type: str
13 |
14 | Version number as calculated by setuptools_scm
15 |
16 | .. automodule:: gphotos_sync.__main__
17 | :members:
18 |
19 | ``gphotos_sync.__main__``
20 | -----------------------------------------
21 | .. automodule:: gphotos_sync.BaseMedia
22 | :members:
23 |
24 | ``gphotos_sync.BaseMedia``
25 | -----------------------------------------
26 | .. automodule:: gphotos_sync.DatabaseMedia
27 | :members:
28 |
29 | ``gphotos_sync.DatabaseMedia``
30 | -----------------------------------------
31 | .. automodule:: gphotos_sync.GooglePhotosMedia
32 | :members:
33 |
34 | ``gphotos_sync.GooglePhotosMedia``
35 | -----------------------------------------
36 | .. automodule:: gphotos_sync.GoogleAlbumMedia
37 | :members:
38 |
39 | ``gphotos_sync.GoogleAlbumMedia``
   | -----------------------------------------
40 | .. automodule:: gphotos_sync.LocalFilesMedia
41 | :members:
42 |
43 | ``gphotos_sync.LocalFilesMedia``
44 | -----------------------------------------
45 | .. automodule:: gphotos_sync.GooglePhotosDownload
46 | :members:
47 |
48 | ``gphotos_sync.GooglePhotosDownload``
49 | -----------------------------------------
50 | .. automodule:: gphotos_sync.GooglePhotosIndex
51 | :members:
52 |
53 | ``gphotos_sync.GooglePhotosIndex``
54 | -----------------------------------------
--------------------------------------------------------------------------------
/docs/reference/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/tutorials.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | Tutorials
4 | =========
5 |
6 | Tutorials for installation, library and commandline usage. New users start here.
7 |
8 | .. toctree::
9 | :caption: Tutorials
10 |
11 | tutorials/installation
12 | tutorials/oauth2
13 | tutorials/login
14 |
--------------------------------------------------------------------------------
/docs/tutorials/installation.rst:
--------------------------------------------------------------------------------
1 | .. _Tutorial:
2 |
3 | Initial Setup
4 | =============
5 |
6 | Before you run gphotos-sync for the first time you will need to create
7 | your own OAuth client ID. This is a once-only operation and the instructions
8 | are here: `Client ID`.
9 |
10 | - Once the client ID is created, download it as ``client_secret.json`` and save
11 | it under the application configuration directory:
12 |
13 | - ``~/Library/Application Support/gphotos-sync/`` under Mac OS X,
14 | - ``~/.config/gphotos-sync/`` under Linux,
15 | - ``C:\Users\<username>\AppData\Local\gphotos-sync\gphotos-sync\`` under Windows.
16 |
17 | If you are running Windows, also see `Windows`.
18 |
19 | You are ready to run gphotos-sync for the first time, either locally or
20 | inside of a container. The first run will require a user login; see
21 | `Login`.
22 |
23 | .. _Container:
24 |
25 | Execute in a container
26 | ======================
27 |
28 | This project now automatically releases a container image with each release.
29 | The latest image is available at ``ghcr.io/gilesknap/gphotos-sync``.
30 |
31 | Your container has two volumes ``/config`` for the client_secret.json file and
32 | ``/storage`` for the backup data. You should map these to host folders if you
33 | want to see them outside of the container.
34 |
35 | Hence the typical way to launch the container with docker runtime would be::
36 |
37 | $ CONFIG=$HOME/.config/gphotos-sync
38 | $ STORAGE=$HOME/My_photos_backup
39 | $ docker run --rm -v $CONFIG:/config -v $STORAGE:/storage -p 8080:8080 -it ghcr.io/gilesknap/gphotos-sync /storage
40 |
41 | The options ``-p 8080:8080 -it`` are required for the first invocation only,
42 | so that the browser can find the authentication service.
43 |
44 | Note that the authentication flow uses a redirect url that sends the
45 | authentication token back to the process. The default redirect is
46 | localhost:8080 and you can adjust the port with ``--port``.
47 |
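   | Subsequent runs no longer need the authentication port or an interactive
   | terminal, so once the token exists a sketch of a routine run (same volume
   | mapping as above) would be::
   |
   |     $ docker run --rm -v $CONFIG:/config -v $STORAGE:/storage ghcr.io/gilesknap/gphotos-sync /storage
   |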
48 | Headless gphotos-sync Servers
49 | -----------------------------
50 |
51 | The authentication
52 | flow only allows localhost for security reasons, so the first run must always
53 | be done on a machine with a browser.
54 |
55 | If you are running on a NAS or other headless server you will first
56 | need to run locally so that you can do the initial login flow with a browser.
57 | Then copy ``.gphotos.token`` from the local backup root folder to the server. For this
58 | first run you could use the following options so that no backup is performed:
59 |
60 | ``--skip-files --skip-albums --skip-index``
61 |
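   | For example (with hypothetical local paths and server name)::
   |
   |     gphotos-sync ~/gphotos-backup --skip-files --skip-albums --skip-index
   |     scp ~/gphotos-backup/.gphotos.token my-nas:/volume1/gphotos-backup/
   |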
62 |
63 | Local Installation
64 | ==================
65 |
66 | To install on your workstation (Linux, Mac or Windows) follow this section.
67 |
68 | Check your version of python
69 | ----------------------------
70 |
71 | You will need python 3.8 or later. You can check your version of python by
72 | typing into a terminal::
73 |
74 | python3 --version
75 |
76 |
77 | Create a virtual environment
78 | ----------------------------
79 |
80 | It is recommended that you install into a “virtual environment” so this
81 | installation will not interfere with any existing Python software::
82 |
83 | python3 -m venv /path/to/venv
84 | source /path/to/venv/bin/activate
85 |
86 |
87 | Install gphotos-sync
88 | --------------------
89 |
90 | You can now use ``pip`` to install the application::
91 |
92 | python3 -m pip install gphotos-sync
93 |
94 | If you require a feature that is not currently released you can also install
95 | directly from github::
96 |
97 | python3 -m pip install git+https://github.com/gilesknap/gphotos-sync.git
98 |
99 | The application should now be installed and the commandline interface on your path.
100 | You can check the version that has been installed by typing::
101 |
102 | gphotos-sync --version
103 |
104 | Running gphotos-sync
105 | ====================
106 |
107 | To begin a backup with default settings create a new empty TARGET DIRECTORY
108 | in which your backups will go and type::
109 |
110 | gphotos-sync <TARGET DIRECTORY>
111 |
112 | For the first invocation you will need to log in as the user whose files you
113 | are backing up; see `Login`.
114 |
115 | Once this process has started it will first index all of your library and then
116 | start a download of the files. The download is multithreaded and will use
117 | most of your internet bandwidth; you can throttle it if needed using the
118 | ``--threads`` option.
119 |
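   | For example, to run a gentler backup with just two download threads::
   |
   |     gphotos-sync <TARGET DIRECTORY> --threads 2
   |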
120 | For a description of additional command line parameters type::
121 |
122 | gphotos-sync --help
123 |
124 | As the download progresses it will create folders and files in your target
125 | directory. The layout of these is described in `Folders`.
126 |
127 | Next time you run gphotos-sync it will incrementally download all new files
128 | since the previous run. It is OK to abort gphotos-sync and restart it; this will
129 | just cause it to continue from where the abort happened.
130 |
--------------------------------------------------------------------------------
/docs/tutorials/login-images/01-sign-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/login-images/01-sign-in.png
--------------------------------------------------------------------------------
/docs/tutorials/login-images/02-verify.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/login-images/02-verify.png
--------------------------------------------------------------------------------
/docs/tutorials/login-images/03-verify2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/login-images/03-verify2.png
--------------------------------------------------------------------------------
/docs/tutorials/login-images/04-access.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/login-images/04-access.png
--------------------------------------------------------------------------------
/docs/tutorials/login.rst:
--------------------------------------------------------------------------------
1 | .. _Login:
2 |
3 | User Login OAuth Flow
4 | =====================
5 |
6 | The first time you run gphotos-sync against a given root folder
7 | you will be prompted to log in as the user whose photos you wish to
8 | back up into that folder.
9 |
10 | For a discussion of authentication see `Tokens`.
11 |
12 | The User token will be saved in the root folder (as ``.gphotos.token``) and
13 | you will not be prompted again. Although the token expires it is configured
14 | to refresh automatically.
15 |
16 | Note that this means you can back up multiple users on the same machine
17 | simply by specifying a different folder for each user, as sketched below.
18 |
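   | For example (with hypothetical folders)::
   |
   |     gphotos-sync /backups/alice    # prompts for alice's login on first run
   |     gphotos-sync /backups/bob      # prompts for bob's login on first run
   |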
19 | When you first run gphotos-sync it must be on a workstation that you can
20 | run a browser on. This is because the authentication flow uses a temporary local
21 | web server and Google's login pages.
22 |
23 | On first run (or when the .gphotos.token has been deleted) you will see
24 | output like this:
25 |
26 | ``Please visit this URL to authorize this application: https://accounts.google.com/o/oauth2/auth?re... [url clipped]``
27 |
28 | Most modern terminals will let you click on the link to launch your
29 | browser; if this does not work then copy the URL and paste it into your
30 | browser.
31 |
32 | You should now see:
33 |
34 | .. image:: login-images/01-sign-in.png
35 | :align: center
36 | :scale: 75 %
37 |
38 | Choose the user you want to back up photos for, or pick 'Use Another Account'
39 | if they are not shown.
40 |
41 | You will be presented with a warning because you have made your own client ID
42 | which is not verified by Google. This is expected for this application (see
43 | `Tokens`).
44 |
45 | .. image:: login-images/02-verify.png
46 | :align: center
47 | :scale: 75 %
48 |
49 | Click 'Advanced' and then 'go to gphotos-sync (unsafe)'.
50 |
51 | .. image:: login-images/03-verify2.png
52 | :align: center
53 | :scale: 75 %
54 |
55 | The next screen shows you the permissions you may grant to this application.
56 | Tick all the boxes and click 'Continue'. The command line gphotos-sync will
57 | then continue to run and start to back up the user's library.
58 |
59 | .. image:: login-images/04-access.png
60 | :align: center
61 | :scale: 75 %
62 |
63 |
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/0.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/1.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/10-test_users.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/10-test_users.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/11-summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/11-summary.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/12-create_creds.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/12-create_creds.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/14-create_id.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/14-create_id.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/15-created.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/15-created.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/2.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/3.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/4.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/5.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/6.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/7-oauth_concent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/7-oauth_concent.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/8-app_registration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/8-app_registration.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth-images/9-scopes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/docs/tutorials/oauth-images/9-scopes.png
--------------------------------------------------------------------------------
/docs/tutorials/oauth2.rst:
--------------------------------------------------------------------------------
1 | .. _Client ID:
2 |
3 | Creating an OAuth Client ID
4 | ===========================
5 |
6 | Overview
7 | --------
8 |
9 | .. contents::
10 |
11 | This document will take you through the steps needed to set up a Google Cloud
12 | project and create an OAuth client ID for gphotos-sync.
13 |
14 | For a discussion of the purpose of this ID see `Tokens`.
15 |
16 | Each step here assumes that you're logged into a personal Google account.
17 |
18 | .. note::
19 | The steps outlined here are correct as of May 2022. There is quite a
20 | bit of churn in the Google Cloud Console UI so the screens may change a
21 | bit.
22 |
23 |
24 | Create a Google Cloud Project
25 | -----------------------------
26 |
27 | #. Head to https://console.cloud.google.com
28 |
29 | * If you have not set up Google Cloud before, select your country
30 | and agree to the Terms of Service.
31 |
32 | .. image:: oauth-images/0.png
33 | :align: center
34 | :scale: 100 %
35 |
36 | #. In the top banner the currently selected project is shown. If you have
37 | no previous projects this will say 'Select a project'
38 |
39 | * Click on the current project name or 'Select a project'
40 |
41 | * This will bring up the 'Select a Project' dialog
42 |
43 | .. image:: oauth-images/1.png
44 | :align: center
45 | :scale: 100
46 |
47 | #. Press **New Project**.
48 | #. Enter a project name. For example, "gphotos". This name must be unique
49 | within your account and cannot be changed in the future.
50 | #. Leave **Location** as the default "No Organization".
51 | #. Press Create.
52 |
53 | .. image:: oauth-images/2.png
54 | :align: center
55 | :scale: 100
56 |
57 |
58 |
59 | Enable the Photos API
60 | ---------------------
61 |
62 | #. Ensure that the project you made above is the active project.
63 | #. Click on the top-left hamburger menu and find **APIs & Services** > **Library**.
64 |
65 | .. image:: oauth-images/3.png
66 | :align: center
67 | :scale: 75
68 |
69 | #. Search for the **Photos Library API** by Google and select it.
   |
70 | .. image:: oauth-images/4.png
71 | :align: center
72 | :scale: 75
73 |
74 | #. Enable it.
75 |
76 | .. image:: oauth-images/5.png
77 | :align: center
78 | :scale: 75
79 |
80 | Configure OAuth Consent
81 | -----------------------
82 |
83 | #. Find **APIs & Services** > **OAuth consent screen**
84 |
85 | .. image:: oauth-images/6.png
86 | :align: center
87 | :scale: 75
88 |
89 | #. Set **User Type** to External.
90 | #. Press Create
91 |
92 | .. image:: oauth-images/7-oauth_concent.png
93 | :align: center
94 | :scale: 75
95 |
96 | #. App Registration - OAuth consent screen:
97 |
98 | #. Set your **App Name**. For example, "gphotos". Note that this does
99 | **not** have to be the same as the project name. Do not include "Google"
100 | in the name or this will fail.
101 | #. Enter your email address as the **User support email**.
102 | #. Enter your email address as the **Developer contact information**.
103 | #. Leave all other fields.
104 | #. Press **Save and Continue**.
105 |
106 | .. image:: oauth-images/8-app_registration.png
107 | :align: center
108 | :scale: 75
109 |
110 | #. App Registration - Scopes
111 |
112 | #. Nothing is *needed* here - you can just ignore everything and press
113 | **Save and Continue**.
114 |
115 | .. image:: oauth-images/9-scopes.png
116 | :align: center
117 | :scale: 75
118 |
119 | #. App Registration - Test Users:
120 |
121 | #. Nothing needed here as you are going to publish the project. This means
122 | it will no longer be in the testing state.
123 | #. Press **Save and Continue**.
124 |
125 | .. image:: oauth-images/10-test_users.png
126 | :align: center
127 | :scale: 75
128 |
129 | #. Summary
130 |
131 | #. You will now see a summary screen like this:
132 | #. Review the summary and press **Back to Dashboard**.
133 |
134 | .. image:: oauth-images/11-summary.png
135 | :align: center
136 | :scale: 75
137 |
138 |
139 | Create the OAuth Credentials
140 | ----------------------------
141 |
142 | #. Find **APIs & Services** > **Credentials**
143 | #. Press **+ Create Credentials** and select **OAuth client ID**.
144 |
145 |
146 | .. image:: oauth-images/12-create_creds.png
147 | :align: center
148 | :scale: 75
149 |
150 | #. Choose **Desktop App**.
151 | #. Choose a name for your credentials, e.g. gphotos.
152 | #. Click **Create**
153 |
154 | .. image:: oauth-images/14-create_id.png
155 | :align: center
156 | :scale: 75
157 |
158 | #. Click **Download JSON** to download the OAuth client ID as JSON and
159 | save it as ``client_secret.json``.
160 |
161 | .. image:: oauth-images/15-created.png
162 | :align: center
163 | :scale: 75
164 |
165 |
166 | Publish the App
167 | ---------------
168 |
169 | .. important::
170 |
171 | Failure to publish the app will result in your auth token expiring after
172 | **7 days**. See `the Google Cloud docs`_ and `Issue #290`_ for details.
173 |
174 | #. Head to **APIs & Services** > **OAuth consent screen**
175 | #. Press **Publish App**.
176 | #. Read the notice and press **Confirm**.
177 |
178 |
179 | At this point you should be able to run ``gphotos-sync`` using the instructions
180 | found in the README_.
181 |
182 | .. _`the Google Cloud docs`: https://developers.google.com/identity/protocols/oauth2#expiration
183 | .. _`Issue #290`: https://github.com/gilesknap/gphotos-sync/issues/290
184 | .. _README: https://github.com/gilesknap/gphotos-sync/blob/main/README.rst
185 |
186 |
187 | Move client_secret.json
188 | -----------------------
189 |
190 | #. The client_secret.json must be moved to the correct location.
191 | #. Each supported operating system has a different location where gphotos-sync
192 | will look for this file.
193 | #. Return to the `Tutorial` for details of where to put this file.
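   |
   | On Linux, for example, assuming the file was downloaded to ``~/Downloads``
   | (paths per the installation tutorial), the move looks like::
   |
   |     mkdir -p ~/.config/gphotos-sync
   |     mv ~/Downloads/client_secret.json ~/.config/gphotos-sync/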
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "gphotos-sync"
7 | classifiers = [
8 | "Development Status :: 3 - Alpha",
9 | "License :: OSI Approved :: Apache Software License",
10 | "Programming Language :: Python :: 3.7",
11 | "Programming Language :: Python :: 3.8",
12 | "Programming Language :: Python :: 3.9",
13 | "Programming Language :: Python :: 3.10",
14 | "Programming Language :: Python :: 3.11",
15 | ]
16 | description = "Google Photos and Albums backup tool"
17 | dependencies = [
18 | "attrs",
19 | "exif",
20 | "appdirs",
21 | "pyyaml",
22 | "psutil",
23 | "google_auth_oauthlib",
24 | ] # Add project dependencies here, e.g. ["click", "numpy"]
25 | dynamic = ["version"]
26 | license.file = "LICENSE"
27 | readme = "README.rst"
28 | requires-python = ">=3.8"
29 |
30 | [project.optional-dependencies]
31 | dev = [
32 | "black",
33 | "mypy",
34 | "pipdeptree",
35 | "pre-commit",
36 | "pydata-sphinx-theme>=0.12",
37 | "pytest",
38 | "pytest-cov",
39 | "ruff",
40 | "sphinx-autobuild",
41 | "sphinx-copybutton",
42 | "sphinx-design",
43 | "sphinx_rtd_theme_github_versions",
44 | "tox-direct",
45 | "types-mock",
46 | "mock",
47 | "types-setuptools",
48 | "types-requests",
49 | "types-PyYAML",
50 | ]
51 |
52 | [project.scripts]
53 | gphotos-sync = "gphotos_sync.__main__:main"
54 |
55 | [project.urls]
56 | GitHub = "https://github.com/gilesknap/gphotos-sync"
57 |
58 | [[project.authors]] # Further authors may be added by duplicating this section
59 | email = "gilesknap@gmail.com"
60 | name = "Giles Knap"
61 |
62 |
63 | [tool.setuptools_scm]
64 | write_to = "src/gphotos_sync/_version.py"
65 |
66 | [tool.mypy]
67 | ignore_missing_imports = true # Ignore missing stubs in imported modules
68 |
69 | [tool.pytest.ini_options]
70 | # Run pytest with all our checkers, and don't spam us with massive tracebacks on error
71 | addopts = """
72 | --tb=native -vv --doctest-modules --doctest-glob="*.rst"
73 | """
74 | # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
75 | filterwarnings = [
76 | "error",
77 | "ignore:.*socket.*:ResourceWarning",
78 | # this deprecation is rather serious for gphotos-sync as it relies on sqlite
79 | # doing date conversion quite heavily - will ignore the deprecation for now
80 | # DeprecationWarning: The default datetime adapter is deprecated as of Python 3.12;
81 | "ignore:.*sqlite3.*:DeprecationWarning:",
82 | # like the above
83 | "ignore:.*datetime.utcfromtimestamp:DeprecationWarning:"
84 | ]
85 | # Doctest python code in docs, python code in src docstrings, test functions in tests
86 | testpaths = "docs src tests"
87 |
88 | [tool.coverage.run]
89 | data_file = "/tmp/photos_sync.coverage"
90 |
91 | [tool.coverage.paths]
92 | # Tests are run from installed location, map back to the src directory
93 | source = ["src", "**/site-packages/"]
94 |
95 | # tox must currently be configured via an embedded ini string
96 | # See: https://github.com/tox-dev/tox/issues/999
97 | [tool.tox]
98 | legacy_tox_ini = """
99 | [tox]
100 | skipsdist=True
101 |
102 | [testenv:{pre-commit,mypy,pytest,docs}]
103 | # Don't create a virtualenv for the command, requires tox-direct plugin
104 | direct = True
105 | passenv = *
106 | allowlist_externals =
107 | pytest
108 | pre-commit
109 | mypy
110 | sphinx-build
111 | sphinx-autobuild
112 | commands =
113 | pytest: pytest --cov=gphotos_sync --cov-report term --cov-report xml:cov.xml {posargs}
114 | mypy: mypy src tests {posargs}
115 | pre-commit: pre-commit run --all-files {posargs}
116 | docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html
117 | """
118 |
119 |
120 | [tool.ruff]
121 | src = ["src", "tests"]
122 | line-length = 88
123 |
124 | [tool.ruff.lint]
125 | select = [
126 | "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
127 | "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e
128 | "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f
129 | "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w
130 | "I001", # isort
131 | ]
132 |
133 | [tool.codespell]
134 | # Ref: https://github.com/codespell-project/codespell#using-a-config-file
135 | skip = '.git*,*.css'
136 | check-hidden = true
137 | # ignore-regex = ''
138 | ignore-words-list = 'implementors'
139 |
--------------------------------------------------------------------------------
/src/gphotos_sync/BadIds.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from pathlib import Path
3 | from typing import Dict
4 |
5 | from yaml import YAMLError, safe_dump, safe_load
6 |
7 | log = logging.getLogger(__name__)
8 |
9 |
10 | class BadIds:
11 | """keeps a list of media items with ID in a YAML file. The YAML file
12 | allows a user to easily investigate their list of media items that have
13 | failed to download
14 |
15 | Attributes:
16 | items: Dict[str, Item] bad ids found with identifying attributes
17 | bad_ids_filename: str: file where ids are stored/read
18 | bad_ids_found: count of Ids found since instantiation
19 | """
20 |
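   | # Illustrative (hypothetical) entry in gphotos.bad_ids.yaml, as written by
   | # add_id() below via yaml.safe_dump:
   | #
   | #   AIbu8...someMediaItemId...:
   | #     path: photos/2019/05/IMG_1234.jpg
   | #     product_url: https://photos.google.com/...
   |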
21 | def __init__(self, root_folder: Path):
22 | self.items: Dict[str, dict] = {}
23 | self.bad_ids_filename: Path = root_folder / "gphotos.bad_ids.yaml"
24 | self.bad_ids_found: int = 0
25 | self.load_ids()
26 |
   | # support use as a context manager so that ids are stored on exit
   | def __enter__(self):
   | return self
   |
27 | def __exit__(self, exc_type, exc_val, exc_tb):
28 | self.store_ids()
29 |
30 | def load_ids(self):
31 | try:
32 | with self.bad_ids_filename.open("r") as stream:
33 | self.items = safe_load(stream) or {}
34 | log.debug("bad_ids file, loaded %d bad ids", len(self.items))
35 | except (YAMLError, IOError):
36 | log.debug("no bad_ids file, bad ids list is empty")
37 |
38 | def store_ids(self):
39 | with self.bad_ids_filename.open("w") as stream:
40 | safe_dump(self.items, stream, default_flow_style=False)
41 |
42 | def add_id(self, path: str, gid: str, product_url: str, e: Exception):
43 | item = {"path": str(path), "product_url": product_url}
44 | self.items[gid] = item
45 | log.debug("BAD ID %s for %s", gid, path, exc_info=e)
46 |
47 | def check_id_ok(self, gid: str):
48 | if gid in self.items:
49 | self.bad_ids_found += 1
50 | return False
51 | else:
52 | return True
53 |
54 | def report(self):
55 | if self.bad_ids_found > 0:
56 | log.warning(
57 | "WARNING: skipped %d files listed in %s",
58 | self.bad_ids_found,
59 | self.bad_ids_filename,
60 | )
61 |
--------------------------------------------------------------------------------
/src/gphotos_sync/BaseMedia.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from pathlib import Path
3 | from typing import Optional
4 |
5 | from .Checks import get_check
6 |
7 |
8 | class BaseMedia(object):
9 | """Base class for media model classes.
10 | These provide a standard interface for media items that have been loaded
11 | from disk / loaded from DB / retrieved from the Google Photos Library
12 | """
13 |
14 | TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
15 |
16 | def __init__(self, root_path: Path = Path(""), **k_args):
17 | self._id: str = ""
18 | self._relative_folder: Path = Path("")
19 | self._root_path: Path = root_path
20 | self._duplicate_number: int = 0
21 |
22 | # Allow boolean check to fail on empty BaseMedia (_id defaults to "")
23 | def __bool__(self) -> bool:
24 | return bool(self._id)
25 |
26 | def set_path_by_date(self, root: Path, use_flat_path: bool = False):
27 | y = "{:04d}".format(self.create_date.year)
28 | m = "{:02d}".format(self.create_date.month)
29 | if use_flat_path:
30 | self._relative_folder = root / (y + "-" + m)
31 | else:
32 | self._relative_folder = root / y / m
33 |
34 | @property
35 | def is_video(self) -> bool:
36 | # guard against no mimetype issue #231
37 | if not self.mime_type:
38 | return False
39 | return self.mime_type.startswith("video")
40 |
41 | @property
42 | def duplicate_number(self) -> int:
43 | return self._duplicate_number
44 |
45 | @duplicate_number.setter
46 | def duplicate_number(self, value: int):
47 | self._duplicate_number = value
48 |
49 | # Relative path to the media file from the root of the sync folder
50 | # e.g. 'Google Photos/2017/09'.
51 | @property
52 | def relative_path(self) -> Path:
53 | return self._relative_folder / self.filename
54 |
55 | # as above but without the filename appended
56 | @property
57 | def relative_folder(self) -> Path:
58 | return self._relative_folder
59 |
60 | @property
61 | def full_folder(self) -> Path:
62 | return self._root_path / self._relative_folder
63 |
64 | @property
65 | def filename(self) -> str:
66 | if self.duplicate_number > 0:
67 | file_str = "%(base)s (%(duplicate)d)%(ext)s" % {
68 | "base": Path(self.orig_name).stem,
69 | "ext": Path(self.orig_name).suffix,
70 | "duplicate": self.duplicate_number + 1,
71 | }
72 | filename = get_check().valid_file_name(file_str)
73 | else:
74 | filename = self.orig_name
75 | return filename
76 |
77 | # ----- Properties for override below -----
78 | @property
79 | def size(self) -> int:
80 | raise NotImplementedError
81 |
82 | @property
83 | def id(self) -> Optional[str]:
84 | raise NotImplementedError
85 |
86 | @property
87 | def description(self) -> str:
88 | raise NotImplementedError
89 |
90 | @property
91 | def orig_name(self) -> str:
92 | raise NotImplementedError
93 |
94 | @property
95 | def create_date(self) -> datetime:
96 | raise NotImplementedError
97 |
98 | @property
99 | def modify_date(self) -> datetime:
100 | raise NotImplementedError
101 |
102 | @property
103 | def mime_type(self) -> Optional[str]:
104 | raise NotImplementedError
105 |
106 | @property
107 | def url(self) -> Optional[str]:
108 | raise NotImplementedError
109 |
--------------------------------------------------------------------------------
/src/gphotos_sync/Checks.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import random
4 | import re
5 | import shutil
6 | import subprocess
7 | from pathlib import Path
8 | from typing import Optional
9 |
10 | from psutil import disk_partitions
11 |
12 | log = logging.getLogger(__name__)
13 |
14 |
15 | class Checks:
16 | # regex for illegal characters in file names and database queries
17 | fix_linux = re.compile(r"[/]|[\x00-\x1f]|\x7f|\x00")
18 | fix_windows = re.compile(r'[,<>:"/\\|?*]|[\x00-\x1f]|\x7f|\x00')
19 | fix_windows_ending = re.compile("([ .]+$)")
20 | fix_whitespace_ending = re.compile("([ \t]+$)")
21 | fix_unicode = re.compile(r"[^\x00-\x7F]")
22 |
23 | # these filesystem types will have NTFS style filename restrictions
24 | windows_fs = ["fat", "ntfs", "9p"]
25 | WINDOWS_MAX_PATH = 248
26 |
27 | def __init__(self, root_path: Path, max_filename, ntfs):
28 | self.root_path: Path = root_path
29 | self._root_str: str = str(root_path).lower()
30 | if ntfs:
31 | self.is_linux: bool = False
32 | else:
33 | self.is_linux = self._check_linux_filesystem()
34 | self.is_symlink: bool = self._symlinks_supported()
35 | self.is_unicode: bool = self._unicode_filenames()
36 | self.is_case_sensitive: bool = self._check_case_sensitive()
37 | self.max_path: int = self._get_max_path_length()
38 | if max_filename > 0:
39 | self.max_filename: int = max_filename
40 | else:
41 | self.max_filename = self._get_max_filename_length()
42 |
43 | def _check_linux_filesystem(self) -> bool:
44 | filesystem_type = ""
45 | for part in disk_partitions(True):
46 | if part.mountpoint == "/":
47 | filesystem_type = part.fstype
48 | continue
49 |
50 | if self._root_str.startswith(part.mountpoint.lower()):
51 | filesystem_type = part.fstype
52 | break
53 | filesystem_type = filesystem_type.lower()
54 | is_linux = not any(fs in filesystem_type for fs in self.windows_fs)
55 | log.info(f"Target filesystem {self._root_str} is {filesystem_type}")
56 |
57 | return is_linux
58 |
59 | def _symlinks_supported(self) -> bool:
60 | log.debug("Checking if is filesystem supports symbolic links...")
61 | dst = "test_dst_%s" % random.getrandbits(32)
62 | src = "test_src_%s" % random.getrandbits(32)
63 | dst_file = self.root_path / dst
64 | src_file = self.root_path / src
65 | src_file.touch()
66 | try:
67 | log.debug("attempting to symlink %s to %s", src_file, dst_file)
68 | dst_file.symlink_to(src_file)
69 | dst_file.unlink()
70 | src_file.unlink()
71 | except BaseException:
72 | if src_file.exists():
73 | src_file.unlink()
74 | log.error("Symbolic links not supported")
75 | log.error("Albums are not going to be synced - requires symlinks")
76 | return False
77 | return True
78 |
79 | def _unicode_filenames(self) -> bool:
80 | log.debug("Checking if File system supports unicode filenames...")
81 | testfile = self.root_path / ".unicode_test.\U0001f604"
82 |
83 | is_unicode = False
84 | try:
85 | testfile.touch()
86 | except BaseException:
87 | log.info("Filesystem does not support Unicode filenames")
88 | else:
89 | log.info("Filesystem supports Unicode filenames")
90 | is_unicode = True
91 | testfile.unlink()
92 | return is_unicode
93 |
94 | def _check_case_sensitive(self) -> bool:
95 | log.debug("Checking if File system is case insensitive...")
96 |
97 | check_folder = self.root_path / ".gphotos_check"
98 | case_file = check_folder / "Temp.Test"
99 | no_case_file = check_folder / "TEMP.TEST"
100 |
101 | is_sensitive = False
102 | try:
103 | check_folder.mkdir()
104 | case_file.touch()
105 | no_case_file.touch()
106 | files = list(check_folder.glob("*"))
107 | if len(files) != 2:
108 | raise ValueError("separate case files not seen")
109 | case_file.unlink()
110 | no_case_file.unlink()
111 | except (FileExistsError, FileNotFoundError, ValueError):
112 | log.info("Case insensitive file system found")
113 | else:
114 | log.info("Case sensitive file system found")
115 | is_sensitive = True
116 | finally:
117 | shutil.rmtree(check_folder)
118 | return is_sensitive
119 |
120 | def _get_max_path_length(self) -> int:
121 | # safe windows length
122 | max_length = self.WINDOWS_MAX_PATH
123 |
124 | # found this on:
125 | # https://stackoverflow.com/questions/32807560/how-do-i-get-in-python-the-maximum-filesystem-path-length-in-unix
126 | try:
127 | max_length = int(
128 | subprocess.check_output(["getconf", "PATH_MAX", str(self.root_path)])
129 | )
130 | except BaseException:
131 | # for failures choose a safe size for Windows filesystems
132 | log.info(
133 | f"can't determine max filepath length, defaulting to " f"{max_length}"
134 | )
135 | log.info("Max Path Length: %d" % max_length)
136 | return max_length
137 |
138 | def _get_max_filename_length(self) -> int:
139 | # safe windows length
140 | max_filename = self.WINDOWS_MAX_PATH
141 | try:
142 | info = os.statvfs(str(self.root_path))
143 | max_filename = info.f_namemax
144 | except BaseException:
145 | # for failures choose a safe size for Windows filesystems
146 | max_filename = 248
147 | log.info(
148 | f"can't determine max filename length, " f"defaulting to {max_filename}"
149 | )
150 | log.info("Max filename length: %d" % max_filename)
151 | return max_filename
152 |
153 | def valid_file_name(self, s: str) -> str:
154 | """
155 | makes sure a string is valid for creating file names
156 |
157 | :param (str) s: input string
158 | :return: (str): sanitized string
159 | """
160 | s = self.fix_whitespace_ending.sub("", s)
161 |
162 | if self.is_linux:
163 | s = self.fix_linux.sub("_", s)
164 | else:
165 | s = self.fix_windows.sub("_", s)
166 | s = self.fix_windows_ending.split(s)[0]
167 |
168 | if not self.is_unicode:
169 | s = self.fix_unicode.sub("_", s)
170 |
171 | return s
172 |
173 |
174 | # a global for holding the current root folder check results
175 | root_folder: Optional[Checks] = None
176 |
177 | # TODO: this approach needs review
178 |
179 |
180 | # ugly global stuff to avoid passing Checks object everywhere
181 | def do_check(root: Path, max_filename=0, ntfs=None):
182 | global root_folder
183 | root_folder = Checks(root, max_filename, ntfs)
184 | return root_folder
185 |
186 |
187 | def get_check():
188 | return root_folder
189 |
--------------------------------------------------------------------------------
/src/gphotos_sync/DatabaseMedia.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding: utf8
3 | from datetime import datetime
4 | from pathlib import Path
5 | from typing import Optional, TypeVar
6 |
7 | from gphotos_sync import Utils
8 | from gphotos_sync.BaseMedia import BaseMedia
9 | from gphotos_sync.Checks import get_check
10 |
11 | # this allows self reference to this class in its factory methods
12 | D = TypeVar("D", bound="DatabaseMedia")
13 |
14 |
15 | # noinspection PyUnresolvedReferences
16 | # pylint: disable=no-member
17 | class DatabaseMedia(BaseMedia):
18 | """A Class for reading and writing BaseMedia objects to and from
19 | database tables
20 |
21 | The standard BaseMedia attributes are represented here. This dumb class
22 | is used for representing any BaseMedia derived class that has been read out
23 | of the Database.
24 |
25 | Attributes:
26 | _id: remote identifier from Google Photos
27 | _url: the 'product URL' which takes you to the Web view for this file
28 | _relative_folder: root relative path to file
29 | _filename: local filename
30 | _orig_name: as above minus any duplicate number suffix
31 | _duplicate_number: which instance if > 1 file has same orig_name
32 | _size: files size on disk
33 | _mime_type: string representation of file type
34 | _date: modification date
35 | _create_date: creation date
36 | _description:
37 | _downloaded: true if previously downloaded to disk
38 | """
39 |
40 | def __init__(
41 | self,
42 | _id: str = "",
43 | _uid: str = "",
44 | _url: str = "",
45 | _relative_folder: Path = Path(),
46 | _filename: str = "",
47 | _orig_name: str = "",
48 | _duplicate_number: int = 0,
49 | _size: int = 0,
50 | _mime_type: str = "",
51 | _description: str = "",
52 | _date: datetime = Utils.MINIMUM_DATE,
53 | _create_date: datetime = Utils.MINIMUM_DATE,
54 | _downloaded: bool = False,
55 | _location: str = "",
56 | _is_shared_album: bool = False,
57 | ):
58 | super(DatabaseMedia, self).__init__()
59 | self._id = _id
60 | self._uid = _uid
61 | self._url = _url
62 | self._relative_folder = _relative_folder
63 | self._filename = _filename
64 | self._orig_name = _orig_name
65 | self._duplicate_number = _duplicate_number
66 | self._size = _size
67 | self._mime_type = _mime_type
68 | self._description = _description
69 | self._date = _date
70 | self._create_date = _create_date
71 | self._downloaded = _downloaded
72 | self._location = _location
73 | self._is_shared_album = _is_shared_album
74 |
75 | # this is used to replace meta data that has been extracted from the
76 | # file system and overrides that provided by Google API
77 | # noinspection PyAttributeOutsideInit
78 | def update_extra_meta(self, uid, create_date, size):
79 | self._uid = uid
80 | self._create_date = create_date
81 | self._size = size
82 |
83 | @property
84 | def location(self) -> Optional[str]:
85 | """
86 | image GPS information
87 | """
88 | return self._location
89 |
90 | # ----- BaseMedia base class override Properties below -----
91 | @property
92 | def size(self) -> int:
93 | return self._size
94 |
95 | @property
96 | def mime_type(self) -> Optional[str]:
97 | return self._mime_type
98 |
99 | @property
100 | def id(self) -> str:
101 | return self._id
102 |
103 | @property
104 | def uid(self) -> str:
105 | return self._uid
106 |
107 | @property
108 | def description(self) -> str:
109 | """
110 | The description of the file
111 | """
112 | return get_check().valid_file_name(self._description)
113 |
114 | @property
115 | def orig_name(self) -> str:
116 | """
117 | Original filename before duplicate name handling
118 | """
119 | return get_check().valid_file_name(self._orig_name)
120 |
121 | @property
122 | def filename(self) -> str:
123 | """
124 | filename including a suffix to make it unique if duplicates exist
125 | """
126 | return get_check().valid_file_name(self._filename)
127 |
128 | @property
129 | def create_date(self) -> datetime:
130 | """
131 | Creation date
132 | """
133 | return self._create_date
134 |
135 | @property
136 | def modify_date(self) -> datetime:
137 | """
138 | Modify Date
139 | """
140 | return self._date
141 |
142 | @property
143 | def url(self) -> str:
144 | """
145 | Remote url to retrieve this file from the server
146 | """
147 | return self._url
148 |
--------------------------------------------------------------------------------
/src/gphotos_sync/DbRow.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from typing import Any, ClassVar, Dict, List, Mapping, Type, TypeVar
4 |
5 | from gphotos_sync.BaseMedia import BaseMedia
6 | from gphotos_sync.DatabaseMedia import DatabaseMedia
7 |
8 | from . import Utils
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
13 | class DbRow:
14 | """
15 | base class for classes representing a row in the database to allow easy
16 | generation of queries and an easy interface for callers e.g.
17 | q = "INSERT INTO SyncFiles ({0}) VALUES ({1})".format(
18 | self.SyncRow.columns, self.SyncRow.params)
19 | self.cur.execute(q, row.dict)
20 |
21 | Class Attributes:
22 | cols_def: keys are names of columns and items are their type
23 | no_update: list of columns that are not for UPDATE (i.e. primary key)
24 | columns: string to substitute into SELECT {} or INSERT INTO ({})
25 | params: a string to insert after VALUES in a sql INSERT or UPDATE
26 | update: a string to substitute into 'UPDATE <table> Set {0}'
27 | empty: True for an empty row
28 | dict: a dictionary of the above attributes
29 |
30 | The remaining attributes are on a per subclass basis and are
31 | generated from row_def by the db_row decorator
32 | """
33 |
34 | # The first 3 class attributes are overridden by each subclass
35 | table: str = ""
36 | cols_def: ClassVar[Mapping[str, Type]] = {}
37 | no_update: ClassVar[List[str]] = []
38 | # the remaining attributes are generated by the db_row class decorator
39 | # using the information supplied in the above 3 attributes
40 | columns: ClassVar[str] = ""
41 | params: ClassVar[str] = ""
42 | update: ClassVar[str] = ""
43 | dict: ClassVar[Dict] = {}
44 | empty: ClassVar[bool] = True
45 |
46 | # The first 2 functions are to be overridden by each subclass; they
47 | # provide conversion to and from BaseMedia derived types
48 | def to_media(self) -> DatabaseMedia:
49 | raise NotImplementedError
50 |
51 | @classmethod
52 | def from_media(cls, media: BaseMedia) -> "DbRow":
53 | raise NotImplementedError
54 |
55 | def __init__(self, _):
56 | # TODO this whole dynamic class thing is a little overdone
57 | # Here I init commonly used fields in derived classes to ease
58 | # Mypy errors but this just demonstrates that we have given up
59 | # on Types providing protection from coding errors
60 | # But Hey - it was fun to make.
61 | self.RemoteId = ""
62 |
63 | # empty row object = boolean False
64 | def __bool__(self) -> bool:
65 | return not self.empty
66 |
67 | T = TypeVar("T", bound="DbRow")
68 |
69 | # factory method for delivering a DbRow derived object based on named arguments
70 | @classmethod
71 | def make(cls: Type[T], **k_args: Any) -> T:
72 | new_row_class = cls(None)
73 | for key, value in k_args.items():
74 | if not hasattr(new_row_class, key):
75 | raise ValueError("{0} does not have column {1}".format(cls, key))
76 | setattr(new_row_class, key, value)
77 | new_row_class.empty = False  # mark this instance (not the class) as non-empty
78 | return new_row_class
79 |
80 | @classmethod
81 | def db_row(cls, row_class: Type["DbRow"]) -> Type["DbRow"]:
82 | """
83 | class decorator function to create RowClass classes that represent a row
84 | in the database
85 |
86 | :param (DbRow) row_class: the class to decorate
87 | :return (DbRow): the decorated class
88 | """
89 | row_class.columns = ",".join(row_class.cols_def.keys())
90 | row_class.params = ":" + ",:".join(row_class.cols_def.keys())
91 | row_class.update = ",".join(
92 | "{0}=:{0}".format(col)
93 | for col in row_class.cols_def.keys()
94 | if col not in row_class.no_update
95 | )
96 |
97 | # The constructor for the generated class, takes an instance of
98 | # database result row and generates a DbRow derived object
99 | def init(self, result_row=None):
100 | for col, col_type in self.cols_def.items():
101 | if not result_row:
102 | value = None
103 | elif col_type == datetime:
104 | value = Utils.string_to_date(result_row[col])
105 | else:
106 | value = result_row[col]
107 | setattr(self, col, value)
108 | if not result_row:
109 | self.empty = True
110 |
111 | # TODO: look into how to make MyPy like these method/property overrides
112 | @property # type: ignore
113 | def to_dict(self):
114 | return self.__dict__
115 |
116 | row_class.__init__ = init # type: ignore
117 | row_class.dict = to_dict # type: ignore
118 | return row_class
119 |
--------------------------------------------------------------------------------
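
A minimal sketch of how the attributes generated by the db_row decorator plug into a query, using a hypothetical TestRow subclass that is not part of the package:

from datetime import datetime

from gphotos_sync.DbRow import DbRow


@DbRow.db_row
class TestRow(DbRow):
    table = "Test"
    cols_def = {"Id": int, "Name": str, "Created": datetime}
    no_update = ["Id"]


row = TestRow.make(Id=1, Name="demo", Created=datetime.now())
print(TestRow.columns)  # Id,Name,Created
print(TestRow.params)   # :Id,:Name,:Created
print(TestRow.update)   # Name=:Name,Created=:Created
query = "INSERT INTO {0} ({1}) VALUES ({2})".format(
    TestRow.table, TestRow.columns, TestRow.params
)
# query can then be executed with cursor.execute(query, row.dict)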
/src/gphotos_sync/GoogleAlbumMedia.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding: utf8
3 | from .BaseMedia import BaseMedia
4 |
5 |
6 | class GoogleAlbumMedia(BaseMedia):
7 | def __init__(self, media_json):
8 | self.__media_json = media_json
9 | super(GoogleAlbumMedia, self).__init__()
10 |
11 | # ----- override Properties below -----
12 | @property
13 | def size(self):
14 | try:
15 | return int(self.__media_json["mediaItemsCount"])
16 | except KeyError:
17 | return 0
18 |
19 | @property
20 | def id(self):
21 | return self.__media_json["id"]
22 |
23 | @property
24 | def description(self):
25 | return self.orig_name
26 |
27 | @property
28 | def orig_name(self) -> str:
29 | try:
30 | return self.__media_json["title"]
31 | except KeyError:
32 | return "none"
33 |
34 | @property
35 | def create_date(self):
36 | return None
37 |
38 | @property
39 | def modify_date(self):
40 | return None
41 |
42 | @property
43 | def mime_type(self):
44 | return "none"
45 |
46 | @property
47 | def url(self):
48 | return self.__media_json["productUrl"]
49 |
--------------------------------------------------------------------------------
/src/gphotos_sync/GoogleAlbumsRow.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 |
4 | from gphotos_sync import Utils
5 | from gphotos_sync.DatabaseMedia import DatabaseMedia
6 | from gphotos_sync.DbRow import DbRow
7 | from gphotos_sync.GoogleAlbumMedia import GoogleAlbumMedia
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
12 | @DbRow.db_row
13 | # pylint: disable=no-member
14 | class GoogleAlbumsRow(DbRow):
15 | """
16 | generates a class with attributes for each of the columns in the
17 | SyncFiles table
18 | """
19 |
20 | table = "Albums"
21 | cols_def = {
22 | "RemoteId": str,
23 | "AlbumName": str,
24 | "Size": int,
25 | "StartDate": datetime,
26 | "EndDate": datetime,
27 | "SyncDate": datetime,
28 | "Downloaded": bool,
29 | "IsSharedAlbum": bool,
30 | }
31 |
32 | # All properties on this class are dynamically added from the above
33 | # list using DbRow.make. Hence Mypy cannot see them and they need
34 | # type: ignore
35 | def to_media(self) -> DatabaseMedia: # type:ignore
36 | db_media = DatabaseMedia(
37 | _id=self.RemoteId, # type:ignore
38 | _filename=self.AlbumName, # type:ignore
39 | _size=self.Size, # type:ignore
40 | _create_date=self.EndDate, # type:ignore
41 | _is_shared_album=self.IsSharedAlbum, # type:ignore
42 | )
43 | return db_media
44 |
45 | @classmethod
46 | def from_media(cls, album) -> GoogleAlbumMedia: # type:ignore
47 | pass
48 |
49 | @classmethod
50 | def from_parm(
51 | cls, album_id, filename, size, start, end, is_shared
52 | ) -> "GoogleAlbumsRow":
53 | new_row = cls.make(
54 | RemoteId=album_id,
55 | AlbumName=filename,
56 | Size=size,
57 | StartDate=start,
58 | EndDate=end,
59 | SyncDate=Utils.date_to_string(datetime.now()),
60 | Downloaded=0,
61 | IsSharedAlbum=is_shared,
62 | )
63 | return new_row
64 |
--------------------------------------------------------------------------------
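
A short sketch, with hypothetical values, of building a row via from_parm and converting it back to a DatabaseMedia:

from datetime import datetime

from gphotos_sync.GoogleAlbumsRow import GoogleAlbumsRow

row = GoogleAlbumsRow.from_parm(
    album_id="abc123",  # hypothetical album id
    filename="Holiday 2020",
    size=42,
    start=datetime(2020, 7, 1),
    end=datetime(2020, 7, 14),
    is_shared=False,
)
media = row.to_media()  # a DatabaseMedia whose create date is the album's EndDate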
/src/gphotos_sync/GooglePhotosIndex.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from pathlib import Path
4 | from typing import Optional
5 |
6 | from gphotos_sync import Utils
7 | from gphotos_sync.GooglePhotosMedia import GooglePhotosMedia
8 | from gphotos_sync.GooglePhotosRow import GooglePhotosRow
9 | from gphotos_sync.LocalData import LocalData
10 | from gphotos_sync.LocalFilesMedia import LocalFilesMedia
11 | from gphotos_sync.restclient import RestClient
12 | from gphotos_sync.Settings import Settings
13 |
14 | log = logging.getLogger(__name__)
15 |
16 |
17 | class GooglePhotosIndex(object):
18 | PAGE_SIZE = 100
19 |
20 | def __init__(
21 | self, api: RestClient, root_folder: Path, db: LocalData, settings: Settings
22 | ):
23 | self._api: RestClient = api
24 | self._root_folder: Path = root_folder
25 | self._db: LocalData = db
26 |
27 | self.files_indexed: int = 0
28 | self.files_index_skipped: int = 0
29 |
30 | if db:
31 | self.latest_download = self._db.get_scan_date() or Utils.MINIMUM_DATE
32 |
33 | self.settings = settings
34 | self.start_date: datetime = settings.start_date
35 | self.end_date: datetime = settings.end_date
36 | self.include_video: bool = settings.include_video
37 | self.rescan: bool = settings.rescan
38 | self.favourites = settings.favourites_only
39 | self.case_insensitive_fs: bool = settings.case_insensitive_fs
40 | self.archived: bool = settings.archived
41 | self._use_flat_path: bool = settings.use_flat_path
42 | self._media_folder: Path = settings.photos_path
43 |
44 | def check_for_removed_in_folder(self, folder: Path):
45 | for pth in folder.iterdir():
46 | if pth.is_dir():
47 | self.check_for_removed_in_folder(pth)
48 | else:
49 | local_path = pth.relative_to(self._root_folder).parent
50 | if pth.match(".*") or pth.match("gphotos*"):
51 | continue
52 | file_row = self._db.get_file_by_path(
53 | GooglePhotosRow, local_path, pth.name
54 | )
55 | if not file_row:
56 | pth.unlink()
57 | log.warning("%s deleted", pth)
58 |
59 | def check_for_removed(self):
60 | """Removes local files that are no longer represented in the Photos
61 | Library - presumably because they were deleted.
62 |
63 | note: for partial scans using date filters this is still OK because
64 | for a file to exist it must have been indexed in a previous scan
65 | """
66 | log.warning("Finding and removing deleted media ...")
67 | self.check_for_removed_in_folder(self._root_folder / self._media_folder)
68 |
69 | def write_media_index(self, media: GooglePhotosMedia, update: bool = True):
70 | self._db.put_row(GooglePhotosRow.from_media(media), update)
71 | if media.create_date > self.latest_download:
72 | self.latest_download = media.create_date
73 |
74 | def search_media(
75 | self,
76 | page_token: Optional[int] = None,
77 | start_date: Optional[datetime] = None,
78 | end_date: Optional[datetime] = None,
79 | do_video: bool = False,
80 | favourites: bool = False,
81 | ) -> dict:
82 | class Y:
83 | def __init__(self, y, m, d):
84 | self.year = y
85 | self.month = m
86 | self.day = d
87 |
88 | def to_dict(self):
89 | return {"year": self.year, "month": self.month, "day": self.day}
90 |
91 | start = Y(1900, 1, 1)
92 | end = Y(3000, 1, 1)
93 | type_list = ["ALL_MEDIA"]
94 |
95 | if start_date:
96 | start = Y(start_date.year, start_date.month, start_date.day)
97 | if end_date:
98 | end = Y(end_date.year, end_date.month, end_date.day)
99 | if not do_video:
100 | type_list = ["PHOTO"]
101 | if favourites:
102 | feature = "FAVORITES"
103 | else:
104 | feature = "NONE"
105 |
106 | if not page_token:
107 | log.info(
108 | "searching for media start=%s, end=%s, videos=%s",
109 | start_date,
110 | end_date,
111 | do_video,
112 | )
113 | if not start_date and not end_date and do_video and not favourites:
114 | # no search criteria so do a list of the entire library
115 | log.debug("mediaItems.list ...")
116 | return self._api.mediaItems.list.execute( # type: ignore
117 | pageToken=page_token, pageSize=self.PAGE_SIZE
118 | ).json()
119 | else:
120 | body = {
121 | "pageToken": page_token,
122 | "pageSize": self.PAGE_SIZE,
123 | "filters": {
124 | "dateFilter": {
125 | "ranges": [
126 | {"startDate": start.to_dict(), "endDate": end.to_dict()}
127 | ]
128 | },
129 | "mediaTypeFilter": {"mediaTypes": type_list},
130 | "featureFilter": {"includedFeatures": [feature]},
131 | "includeArchivedMedia": self.archived,
132 | },
133 | }
134 | log.debug("mediaItems.search with body:\n{}".format(body))
135 | return self._api.mediaItems.search.execute(body).json() # type: ignore
136 |
137 | def index_photos_media(self) -> int:
138 | log.warning("Indexing Google Photos Files ...")
139 | total_listed = 0
140 |
141 | if self.start_date:
142 | start_date = self.start_date
143 | elif self.rescan:
144 | start_date = None
145 | else:
146 | start_date = self._db.get_scan_date()
147 |
148 | items_json = self.search_media(
149 | start_date=start_date,
150 | end_date=self.end_date,
151 | do_video=self.include_video,
152 | favourites=self.favourites,
153 | )
154 |
155 | while items_json:
156 | media_json = items_json.get("mediaItems", [])
157 | items_count = 0
158 | for media_item_json in media_json:
159 | items_count += 1
160 | total_listed += 1
161 | media_item = GooglePhotosMedia(
162 | media_item_json, to_lower=self.case_insensitive_fs
163 | )
164 | media_item.set_path_by_date(self._media_folder, self._use_flat_path)
165 | (num, row) = self._db.file_duplicate_no(
166 | str(media_item.filename),
167 | str(media_item.relative_folder),
168 | media_item.id,
169 | )
170 | # we just learned if there were any duplicates in the db
171 | media_item.duplicate_number = num
172 |
173 | if self.settings.progress and total_listed % 10 == 0:
174 | log.warning(f"Listed {total_listed} items ...\033[F")
175 | if not row:
176 | self.files_indexed += 1
177 | log.info(
178 | "Indexed %d %s", self.files_indexed, media_item.relative_path
179 | )
180 | self.write_media_index(media_item, False)
181 | if self.files_indexed % 2000 == 0:
182 | self._db.store()
183 | elif media_item.modify_date > row.modify_date:
184 | self.files_indexed += 1
185 | # todo at present there is no modify date in the API
186 | # so updates cannot be monitored - this won't get called
187 | log.info(
188 | "Updated Index %d %s",
189 | self.files_indexed,
190 | media_item.relative_path,
191 | )
192 | self.write_media_index(media_item, True)
193 | else:
194 | self.files_index_skipped += 1
195 | log.debug(
196 | "Skipped Index (already indexed) %d %s",
197 | self.files_index_skipped,
198 | media_item.relative_path,
199 | )
200 | self.latest_download = max(
201 | self.latest_download, media_item.create_date
202 | )
203 | log.debug(
204 | "search_media parsed %d media_items with %d PAGE_SIZE",
205 | items_count,
206 | GooglePhotosIndex.PAGE_SIZE,
207 | )
208 |
209 | next_page = items_json.get("nextPageToken")
210 | if next_page:
211 | items_json = self.search_media(
212 | page_token=next_page,
213 | start_date=start_date,
214 | end_date=self.end_date,
215 | do_video=self.include_video,
216 | favourites=self.favourites,
217 | )
218 | else:
219 | break
220 |
221 | # scan (in reverse date order) completed so the next incremental scan
222 | # can start from the most recent file in this scan
223 | if not self.start_date:
224 | self._db.set_scan_date(last_date=self.latest_download)
225 |
226 | log.warning(f"indexed {self.files_indexed} items")
227 | return self.files_indexed
228 |
229 | def get_extra_meta(self):
230 | count = 0
231 | log.warning(
232 | "updating index with extra metadata for comparison "
233 | "(may take some time) ..."
234 | )
235 | media_items = self._db.get_rows_by_search(GooglePhotosRow, uid="ISNULL")
236 | for item in media_items:
237 | file_path = self._root_folder / item.relative_path
238 | # if this item has a uid it has been scanned before
239 | if file_path.exists():
240 | local_file = LocalFilesMedia(file_path)
241 | count += 1
242 | log.info("updating metadata %d on %s", count, file_path)
243 | item.update_extra_meta(
244 | local_file.uid, local_file.create_date, local_file.size
245 | )
246 | # erm, let's try some duck typing then!
247 | # todo is the DbRow class model rubbish or brilliant Python?
248 | # noinspection PyTypeChecker
249 | self._db.put_row(GooglePhotosRow.from_media(item), update=True)
250 | if count % 2000 == 0:
251 | self._db.store()
252 | else:
253 | log.debug("skipping metadata (already scanned) on %s", file_path)
254 | log.warning("updating index with extra metadata complete")
255 |
--------------------------------------------------------------------------------
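
A minimal sketch of the paging pattern that index_photos_media drives above: search_media returns one page of mediaItems plus a nextPageToken to feed back in. Here indexer stands for an already-constructed GooglePhotosIndex:

items_json = indexer.search_media(do_video=True)
while items_json:
    for item_json in items_json.get("mediaItems", []):
        pass  # index each item, as index_photos_media does above
    next_page = items_json.get("nextPageToken")
    if not next_page:
        break
    items_json = indexer.search_media(page_token=next_page, do_video=True)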
/src/gphotos_sync/GooglePhotosMedia.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding: utf8
3 |
4 | import re
5 | from datetime import datetime
6 | from pathlib import Path
7 | from typing import Any, Dict, List, Optional, Union
8 |
9 | from gphotos_sync.Checks import get_check
10 |
11 | from . import Utils
12 | from .BaseMedia import BaseMedia
13 |
14 | DuplicateSuffix = re.compile(r"(.*)[ ]\(\d+\)(\..*)")
15 |
16 | JSONValue = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
17 | JSONType = Union[Dict[str, JSONValue], List[JSONValue]]
18 |
19 |
20 | class GooglePhotosMedia(BaseMedia):
21 | def __init__(self, media_json, to_lower=False):
22 | self.__media_json: Dict[str, Any] = media_json
23 | self.__uid: Optional[str] = None
24 | self.__lower = to_lower
25 | super(GooglePhotosMedia, self).__init__()
26 | if self.is_video:
27 | self.__media_meta = media_json.get("mediaMetadata").get("video")
28 | else:
29 | self.__media_meta = media_json.get("mediaMetadata").get("photo")
30 |
31 | @property
32 | def uid(self) -> Optional[str]:
33 | return self.__uid
34 |
35 | # ----- override Properties below -----
36 | @property
37 | def size(self) -> int:
38 | return 0
39 |
40 | @property
41 | def id(self) -> str:
42 | return self.__media_json["id"]
43 |
44 | @property
45 | def description(self) -> str:
46 | try:
47 | return get_check().valid_file_name(self.__media_json["description"])
48 | except KeyError:
49 | return ""
50 |
51 | @property
52 | def orig_name(self) -> str:
53 | try:
54 | name = self.__media_json["filename"]
55 | matches = DuplicateSuffix.match(name)
56 | if matches:
57 | # append the prefix and the suffix, ditching the ' (n)'
58 | name = "{}{}".format(*matches.groups())
59 | except KeyError:
60 | name = ""
61 | if self.__lower:
62 | name = name.lower()
63 | return str(Path(get_check().valid_file_name(name)))
64 |
65 | @property
66 | def create_date(self) -> datetime:
67 | try:
68 | create_date = self.__media_json["mediaMetadata"].get("creationTime")
69 | photo_date = Utils.string_to_date(create_date)
70 | except (KeyError, ValueError):
71 | photo_date = Utils.MINIMUM_DATE
72 |
73 | # TODO: why does mypy not like this?
74 | return photo_date # type: ignore
75 |
76 | @property
77 | def modify_date(self) -> datetime:
78 | date = Utils.MINIMUM_DATE
79 | return date
80 |
81 | @property
82 | def mime_type(self) -> Optional[str]:
83 | return self.__media_json.get("mimeType")
84 |
85 | @property
86 | def url(self) -> Optional[str]:
87 | return self.__media_json.get("productUrl")
88 |
89 | @property
90 | def camera_model(self):
91 | camera_model = None
92 | try:
93 | camera_model = self.__media_meta.get("cameraModel")
94 | except (KeyError, AttributeError):
95 | pass
96 | return camera_model
97 |
--------------------------------------------------------------------------------
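
The DuplicateSuffix regex above strips Google's ' (n)' duplicate marker to recover the original name; a quick self-contained illustration:

import re

DuplicateSuffix = re.compile(r"(.*)[ ]\(\d+\)(\..*)")

matches = DuplicateSuffix.match("IMG_001 (2).jpg")
if matches:
    # append the prefix and the suffix, ditching the ' (n)'
    print("{}{}".format(*matches.groups()))  # IMG_001.jpg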
/src/gphotos_sync/GooglePhotosRow.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from pathlib import Path
4 |
5 | from gphotos_sync.BaseMedia import BaseMedia
6 | from gphotos_sync.DatabaseMedia import DatabaseMedia
7 | from gphotos_sync.DbRow import DbRow
8 | from gphotos_sync.GooglePhotosMedia import GooglePhotosMedia
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
13 | @DbRow.db_row
14 | # pylint: disable=no-member
15 | class GooglePhotosRow(DbRow):
16 | """
17 | generates a class with attributes for each of the columns in the
18 | SyncFiles table
19 | """
20 |
21 | table = "SyncFiles"
22 | cols_def = {
23 | "Id": int,
24 | "RemoteId": str,
25 | "Uid": str,
26 | "Url": str,
27 | "Path": str,
28 | "FileName": str,
29 | "OrigFileName": str,
30 | "DuplicateNo": int,
31 | "FileSize": int,
32 | "MimeType": str,
33 | "Description": str,
34 | "ModifyDate": datetime,
35 | "CreateDate": datetime,
36 | "SyncDate": datetime,
37 | "Downloaded": int,
38 | "Location": str,
39 | }
40 | no_update = ["Id"]
41 |
42 | # All properties on this class are dynamically added from the above
43 | # list using DbRow.make. Hence Mypy cannot see them and they need
44 | # type: ignore
45 | def to_media(self) -> DatabaseMedia:
46 | pth = Path(self.Path) if self.Path else None # type: ignore
47 | db_media = DatabaseMedia(
48 | _id=self.RemoteId, # type: ignore
49 | _url=self.Url, # type: ignore
50 | _uid=self.Uid, # type: ignore
51 | _relative_folder=pth, # type: ignore
52 | _filename=self.FileName, # type: ignore
53 | _orig_name=self.OrigFileName, # type: ignore
54 | _duplicate_number=self.DuplicateNo, # type: ignore
55 | _size=self.FileSize, # type: ignore
56 | _mime_type=self.MimeType, # type: ignore
57 | _description=self.Description, # type: ignore
58 | _date=self.ModifyDate, # type: ignore
59 | _create_date=self.CreateDate, # type: ignore
60 | _downloaded=self.Downloaded, # type: ignore
61 | _location=self.Location, # type: ignore
62 | )
63 | return db_media
64 |
65 | @classmethod
66 | def from_media( # type: ignore
67 | cls,
68 | media: GooglePhotosMedia,
69 | ) -> "GooglePhotosRow":
70 | now_time = datetime.now().strftime(BaseMedia.TIME_FORMAT)
71 | new_row = cls.make(
72 | RemoteId=media.id,
73 | Url=media.url,
74 | Uid=media.uid,
75 | Path=str(media.relative_folder),
76 | FileName=str(media.filename),
77 | OrigFileName=str(media.orig_name),
78 | DuplicateNo=media.duplicate_number,
79 | FileSize=media.size,
80 | MimeType=media.mime_type,
81 | Description=media.description,
82 | ModifyDate=media.modify_date,
83 | CreateDate=media.create_date,
84 | SyncDate=now_time,
85 | Downloaded=0,
86 | Location="",
87 | )
88 | return new_row
89 |
--------------------------------------------------------------------------------
/src/gphotos_sync/LocalFilesMedia.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding: utf8
3 |
4 | import logging
5 | import re
6 | from datetime import datetime
7 | from json import loads
8 | from mimetypes import guess_type
9 | from pathlib import Path
10 | from subprocess import PIPE, CalledProcessError, run
11 | from typing import Any, Dict, List, Optional, Union
12 |
13 | import exif
14 | from plum.exceptions import UnpackError
15 |
16 | from . import Utils
17 | from .BaseMedia import BaseMedia
18 |
19 | log = logging.getLogger(__name__)
20 |
21 | JSONValue = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
22 | JSONType = Union[Dict[str, JSONValue], List[JSONValue]]
23 |
24 | # command to extract creation date from video files
25 | FF_PROBE = [
26 | "ffprobe",
27 | "-v",
28 | "quiet",
29 | "-print_format",
30 | "json",
31 | "-show_entries",
32 | "stream=index,codec_type:stream_tags=creation_time:format_" "tags=creation_time",
33 | ]
34 |
35 | # Huawei adds these camera modes to description but Google Photos seems wise to
36 | # it and does not report this in its description metadata
37 | # noinspection SpellCheckingInspection
38 | HUAWEI_JUNK = [
39 | "jhdr",
40 | "edf",
41 | "sdr",
42 | "cof",
43 | "nor",
44 | "mde",
45 | "oznor",
46 | "btf",
47 | "btfmdn",
48 | "ptfbty",
49 | "mef",
50 | "bsh",
51 | "dav",
52 | "rpt",
53 | "fbt",
54 | "burst",
55 | "rhdr",
56 | "fbtmdn",
57 | "ptr",
58 | "rbtoz",
59 | "btr",
60 | "rbsh",
61 | "btroz",
62 | ]
63 | # regex to check if this (might be) a duplicate with ' (n)' suffix. Note that
64 | # 'demo (0).jpg' and 'demo (1).jpg' are not in the scheme
65 | # but 'demo (2).jpg' to 'demo (999).jpg' are
66 | DUPLICATE_MATCH = re.compile(r"(.*) \(([2-9]|\d{2,3})\)\.(.*)")
67 |
68 |
69 | class LocalFilesMedia(BaseMedia):
70 | def __init__(self, full_path: Path):
71 | super(LocalFilesMedia, self).__init__()
72 | (mime, _) = guess_type(str(full_path))
73 | self.__mime_type: str = mime or "application/octet-stream"
74 | self.__full_path: Path = full_path
75 | self.__original_name: str = full_path.name
76 | self.__ffprobe_installed = True
77 | self.__createDate: datetime = Utils.MINIMUM_DATE
78 |
79 | self.got_meta: bool = False
80 | self.__exif: exif.Image = None
81 |
82 | matches = DUPLICATE_MATCH.match(str(full_path.name))
83 | if matches:
84 | # this is (probably) a duplicate with 'file (n).jpg' format
85 | # extract the original name and duplicate no.
86 | # -1 is because the first duplicate is labelled ' (2)'
87 | self.duplicate_number: int = int(matches[2]) - 1
88 | self.__original_name = matches[1] + "." + matches[3]
89 |
90 | if self.is_video:
91 | self.get_video_meta()
92 | else:
93 | self.get_exif()
94 | self.get_image_date()
95 |
96 | def get_video_meta(self):
97 | if self.__ffprobe_installed:
98 | try:
99 | command = FF_PROBE + [str(self.__full_path)]
100 | result = run(command, stdout=PIPE, check=True)
101 | out = str(result.stdout.decode("utf-8"))
102 | json = loads(out)
103 | t = json["format"]["tags"]["creation_time"]
104 | self.__createDate = Utils.string_to_date(t)
105 | self.got_meta = True
106 | except FileNotFoundError:
107 | # this means there is no ffprobe installed
108 | self.__ffprobe_installed = False
109 | except CalledProcessError:
110 | pass
111 | except KeyError:
112 | # ffprobe worked but there is no creation time in the JSON
113 | pass
114 |
115 | if not self.__createDate or self.__createDate == Utils.MINIMUM_DATE:
116 | # just use file date
117 | self.__createDate = datetime.utcfromtimestamp(
118 | self.__full_path.stat().st_mtime
119 | )
120 |
121 | def get_image_date(self):
122 | p_date = None
123 | if self.got_meta:
124 | try:
125 | # noinspection PyUnresolvedReferences
126 | p_date = Utils.string_to_date(self.__exif.datetime_original)
127 | except (AttributeError, ValueError, KeyError):
128 | try:
129 | # noinspection PyUnresolvedReferences
130 | p_date = Utils.string_to_date(self.__exif.datetime)
131 | except (AttributeError, ValueError, KeyError):
132 | pass
133 | if not p_date:
134 | # just use file date
135 | p_date = datetime.utcfromtimestamp(self.__full_path.stat().st_mtime)
136 | self.__createDate = p_date
137 |
138 | def get_exif(self):
139 | try:
140 | with open(str(self.relative_folder / self.filename), "rb") as image_file:
141 | self.__exif = exif.Image(image_file)
142 | self.got_meta = True
143 | except (IOError, AssertionError):
144 | self.got_meta = False
145 | except (UnpackError, ValueError):
146 | log.error(
147 | "Problem reading exif data from file: %s",
148 | str(self.relative_folder / self.filename),
149 | )
150 | self.got_meta = False
151 |
152 | @property
153 | def uid(self) -> str:
154 | if not self.got_meta:
155 | uid = "none"
156 | elif self.is_video:
157 | uid = "not_supported"
158 | else:
159 | try:
160 | # noinspection PyUnresolvedReferences
161 | uid = self.__exif.image_unique_id
162 | except (AttributeError, KeyError):
163 | uid = "no_uid_in_exif"
164 | return uid
165 |
166 | # ----- override Properties below -----
167 | @property
168 | def relative_folder(self) -> Path:
169 | return self.__full_path.parent
170 |
171 | @property
172 | def size(self) -> int:
173 | s = self.__full_path.stat().st_size
174 | return s
175 |
176 | @property
177 | def id(self) -> Optional[str]:
178 | return None
179 |
180 | @property
181 | def description(self) -> str:
182 | try:
183 | result = self.__exif.image_description # type: ignore
184 | except (AttributeError, KeyError, ValueError, RuntimeWarning):
185 | result = None
186 | if result:
187 | if result in HUAWEI_JUNK:
188 | result = ""
189 | else:
190 | result = ""
191 | return result
192 |
193 | @property
194 | def orig_name(self) -> str:
195 | return self.__original_name
196 |
197 | @property
198 | def create_date(self) -> datetime:
199 | return self.__createDate
200 |
201 | @property
202 | def modify_date(self) -> datetime:
203 | return self.create_date
204 |
205 | @property
206 | def mime_type(self) -> str:
207 | return self.__mime_type
208 |
209 | @property
210 | def url(self) -> Optional[str]:
211 | return None
212 |
213 | @property
214 | def camera_model(self):
215 | try:
216 | # noinspection PyUnresolvedReferences
217 | cam = "{} {}".format(self.__exif.make, self.__exif.model)
218 | except (AttributeError, KeyError):
219 | cam = None
220 | return cam
221 |
--------------------------------------------------------------------------------
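
To illustrate the duplicate scheme documented above DUPLICATE_MATCH: ' (2)' marks the first duplicate, while ' (0)' and ' (1)' fall outside the scheme:

import re

DUPLICATE_MATCH = re.compile(r"(.*) \(([2-9]|\d{2,3})\)\.(.*)")

matches = DUPLICATE_MATCH.match("demo (2).jpg")
if matches:
    print(int(matches[2]) - 1)            # 1 - the first duplicate is ' (2)'
    print(matches[1] + "." + matches[3])  # demo.jpg - the original name
print(DUPLICATE_MATCH.match("demo (1).jpg"))  # None - outside the scheme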
/src/gphotos_sync/LocalFilesRow.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from pathlib import Path
4 |
5 | from gphotos_sync.BaseMedia import BaseMedia
6 | from gphotos_sync.DatabaseMedia import DatabaseMedia
7 | from gphotos_sync.DbRow import DbRow
8 | from gphotos_sync.LocalFilesMedia import LocalFilesMedia
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
13 | @DbRow.db_row
14 | class LocalFilesRow(DbRow):
15 | """
16 | generates a class with attributes for each of the columns in the
17 | LocalFiles table
18 | """
19 |
20 | table = "LocalFiles"
21 | cols_def = {
22 | "Id": int,
23 | "RemoteId": str,
24 | "Uid": str,
25 | "Path": str,
26 | "FileName": str,
27 | "OriginalFileName": str,
28 | "DuplicateNo": int,
29 | "MimeType": str,
30 | "Description": str,
31 | "FileSize": int,
32 | "ModifyDate": datetime,
33 | "CreateDate": datetime,
34 | "SyncDate": datetime,
35 | }
36 | no_update = ["Id"]
37 |
38 | # All properties on this class are dynamically added from the above
39 | # list using DbRow.make. Hence Mypy cannot see them and they need
40 | # type: ignore
41 | def to_media(self) -> DatabaseMedia:
42 | pth = Path(self.Path) if self.Path else None # type: ignore
43 | db_media = DatabaseMedia(
44 | _id=self.RemoteId, # type: ignore
45 | _relative_folder=pth, # type: ignore
46 | _filename=self.FileName, # type: ignore
47 | _orig_name=self.OriginalFileName, # type: ignore
48 | _duplicate_number=self.DuplicateNo, # type: ignore
49 | _size=self.FileSize, # type: ignore
50 | _mime_type=self.MimeType, # type: ignore
51 | _description=self.Description, # type: ignore
52 | _date=self.ModifyDate, # type: ignore
53 | _create_date=self.CreateDate, # type: ignore
54 | )
55 | return db_media
56 |
57 | @classmethod
58 | def from_media(cls, media: LocalFilesMedia) -> "LocalFilesRow": # type: ignore
59 | now_time = datetime.now().strftime(BaseMedia.TIME_FORMAT)
60 | new_row = cls.make(
61 | Path=str(media.relative_folder),
62 | Uid=media.uid,
63 | FileName=media.filename,
64 | OriginalFileName=media.orig_name,
65 | DuplicateNo=media.duplicate_number,
66 | FileSize=media.size,
67 | MimeType=media.mime_type,
68 | Description=media.description,
69 | ModifyDate=media.modify_date,
70 | CreateDate=media.create_date,
71 | SyncDate=now_time,
72 | )
73 | return new_row
74 |
--------------------------------------------------------------------------------
/src/gphotos_sync/LocalFilesScan.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import shutil
3 | from pathlib import Path
4 | from typing import Callable
5 |
6 | from .LocalData import LocalData
7 | from .LocalFilesMedia import LocalFilesMedia
8 | from .LocalFilesRow import LocalFilesRow
9 |
10 | log = logging.getLogger(__name__)
11 |
12 | IGNORE_FOLDERS = ["albums", "comparison", "gphotos-code"]
13 |
14 |
15 | class LocalFilesScan(object):
16 | """A Class for indexing media files in a folder for comparison to a
17 | Google Photos Library
18 | """
19 |
20 | def __init__(self, root_folder: Path, scan_folder: Path, db: LocalData):
21 | """
22 | Parameters:
23 | scan_folder: path to the root of local files to scan
24 | db: local database for indexing
25 | """
26 | self._scan_folder: Path = scan_folder
27 | self._root_folder: Path = root_folder
28 | self._comparison_folder = self._root_folder / "comparison"
29 | self._ignore_files: str = str(root_folder / "*gphotos*")
30 | self._ignore_folders = [root_folder / path for path in IGNORE_FOLDERS]
31 | self._db: LocalData = db
32 | self.count = 0
33 |
34 | def scan_local_files(self):
35 | if not self._scan_folder.exists():
36 | raise FileNotFoundError(
37 | "Compare folder {} does not exist".format(self._scan_folder)
38 | )
39 | # for self-comparison, make sure there is no comparison folder
40 | # or we'll get recursive entries
41 | if self._comparison_folder.exists():
42 | log.debug("removing previous comparison tree")
43 | shutil.rmtree(self._comparison_folder)
44 | log.warning("removing previous local scan data")
45 | self._db.local_erase()
46 | log.warning("Indexing comparison folder %s", self._scan_folder)
47 | self.scan_folder(self._scan_folder, self.index_local_item)
48 | log.warning(
49 | "Indexed %d files in comparison folder %s", self.count, self._scan_folder
50 | )
51 |
52 | def scan_folder(self, folder: Path, index: Callable):
53 | if folder.exists():
54 | log.debug("scanning %s", folder)
55 | for pth in folder.iterdir():
56 | if pth.is_dir():
57 | # IGNORE_FOLDERS for comparing against 'self'
58 | if pth not in self._ignore_folders:
59 | self.scan_folder(pth, index)
60 | elif not pth.is_symlink():
61 | if not pth.match(self._ignore_files):
62 | self.count += index(pth)
63 | if self.count and self.count % 20000 == 0:
64 | self._db.store()
65 |
66 | def index_local_item(self, path: Path) -> int:
67 | if self._db.local_exists(file_name=path.name, path=str(path.parent)):
68 | result = 0
69 | log.debug("already indexed local file: %s", path)
70 | else:
71 | result = 1
72 | try:
73 | lf = LocalFilesMedia(path)
74 | log.info(
75 | "indexed local file: %s %s %s %s",
76 | lf.relative_folder,
77 | lf.filename,
78 | lf.create_date,
79 | lf.uid,
80 | )
81 | self._db.put_row(LocalFilesRow.from_media(lf))
82 | except Exception:
83 | log.error(
84 | "file %s could not be made into a media obj", path, exc_info=True
85 | )
86 | raise
87 | return result
88 |
89 | def find_missing_gphotos(self):
90 | log.warning("matching local files and photos library ...")
91 | self._db.find_local_matches()
92 | log.warning("creating comparison folder ...")
93 | folders_missing = self._comparison_folder / "missing_files"
94 | if self._comparison_folder.exists():
95 | log.debug("removing previous comparison tree")
96 | shutil.rmtree(self._comparison_folder)
97 |
98 | for i, orig_path in enumerate(self._db.get_missing_paths()):
99 | link_path = folders_missing / orig_path.relative_to(self._scan_folder)
100 | log.debug("adding missing file %d link %s", i, link_path)
101 | if orig_path.exists():
102 | if not link_path.parent.exists():
103 | link_path.parent.mkdir(parents=True)
104 | if not link_path.exists():
105 | link_path.symlink_to(orig_path)
106 |
107 | folders_extras = self._comparison_folder / "extra_files"
108 | for i, orig_path in enumerate(self._db.get_extra_paths()):
109 | link_path = folders_extras / orig_path
110 | log.debug("adding extra file %d link %s", i, link_path)
111 | extra = self._root_folder / orig_path
112 | if extra.exists():
113 | if not link_path.parent.exists():
114 | link_path.parent.mkdir(parents=True)
115 | if not link_path.exists():
116 | link_path.symlink_to(extra)
117 |
118 | flat_duplicates = self._comparison_folder / "duplicates"
119 | flat_duplicates.mkdir(parents=True)
120 | duplicate_group = 0
121 | prev_id = ""
122 | for i, (rid, orig_path) in enumerate(self._db.get_duplicates()):
123 | if rid != prev_id:
124 | duplicate_group += 1
125 | prev_id = rid
126 | duplicate = self._root_folder / orig_path
127 | if duplicate.exists():
128 | log.debug(
129 | "adding duplicate group %d file %d link %s",
130 | duplicate_group,
131 | i,
132 | orig_path,
133 | )
134 | flat_link = flat_duplicates / "{:05d}_{:03d}_{}".format(
135 | i, duplicate_group, orig_path.name
136 | )
137 | flat_link.symlink_to(duplicate)
138 |
--------------------------------------------------------------------------------
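
find_missing_gphotos builds its comparison tree out of symlinks; a self-contained sketch of that technique, with hypothetical paths:

from pathlib import Path

orig_path = Path("/photos/originals/2019/IMG_0001.jpg")  # hypothetical original
link_path = Path("/photos/backup/comparison/missing_files/2019/IMG_0001.jpg")
if orig_path.exists() and not link_path.exists():
    link_path.parent.mkdir(parents=True, exist_ok=True)
    link_path.symlink_to(orig_path)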
/src/gphotos_sync/Logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | from datetime import datetime
4 | from pathlib import Path
5 | from typing import Any, Optional
6 |
7 | # add a trace level for logging all API calls to Google
8 | # this will be filtered into a separate file
9 | TRACE_API_NUM = 9
10 | TRACE_API = "TRACE"
11 |
12 |
13 | class MaxLevelFilter(logging.Filter):
14 | """Filters (lets through) all messages with level < LEVEL"""
15 |
16 | def __init__(self, level: int, allow_trace: bool):
17 | self.level = level
18 | self.allow_trace = allow_trace
19 |
20 | def filter(self, record):
21 | result = self.allow_trace or record.levelno != TRACE_API_NUM
22 | result &= record.levelno < self.level
23 | return result
24 |
25 |
26 | def trace(self, message, *args, **kwargs):
27 | if self.isEnabledFor(TRACE_API_NUM):
28 | self._log(TRACE_API_NUM, message, args, **kwargs)
29 |
30 |
31 | setattr(logging.Logger, "trace", trace)
32 |
33 |
34 | def setup_logging(log_level: str, log_filename: Path, folder: Path):
35 | # add our custom trace level logging
36 | logging.addLevelName(TRACE_API_NUM, TRACE_API)
37 |
38 | # if we are debugging, the requests library is too noisy
39 | logging.getLogger("requests").setLevel(logging.WARNING)
40 | logging.getLogger("requests_oauthlib").setLevel(logging.WARNING)
41 | logging.getLogger("urllib3").setLevel(logging.WARNING)
42 |
43 | # determine the numeric log level from the string argument
44 | if log_level.upper() == TRACE_API.upper():
45 | # todo - i would expect addLevelName to do this for us?
46 | numeric_level: Optional[Any] = TRACE_API_NUM
47 | else:
48 | numeric_level = getattr(logging, log_level.upper(), None)
49 | if not isinstance(numeric_level, int):
50 | raise ValueError("Invalid log level: %s" % log_level)
51 |
52 | # configure the log files locations
53 | if log_filename:
54 | log_file = folder / log_filename
55 | if log_file.is_dir():
56 | log_file = log_file / "gphotos{}.log".format(
57 | datetime.now().strftime("%y%m%d_%H%M%S")
58 | )
59 | else:
60 | log_file = folder / "gphotos.log"
61 | trace_file = log_file.with_suffix(".trace")
62 |
63 | # define handler for the log file
64 | log_handler = logging.FileHandler(log_file, mode="w", encoding="utf-8")
65 | log_handler.setLevel(logging.DEBUG)
66 |
67 | # define handler for the trace file
68 | trace_handler = logging.FileHandler(trace_file, mode="w", encoding="utf-8")
69 | trace_handler.setLevel(TRACE_API_NUM)
70 | trace_handler.addFilter(MaxLevelFilter(logging.DEBUG, True))
71 |
72 | # set format for files
73 | formatter = logging.Formatter(
74 | "%(asctime)s %(name)-12s %(levelname)-8s " "%(message)s",
75 | datefmt="%m-%d %H:%M:%S",
76 | )
77 | log_handler.setFormatter(formatter)
78 | trace_handler.setFormatter(formatter)
79 |
80 | # define handlers for std out and std err
81 | stdout_handler = logging.StreamHandler(sys.stdout)
82 | stderr_handler = logging.StreamHandler(sys.stderr)
83 | # std error prints error and higher
84 | stderr_handler.setLevel(max(numeric_level, logging.ERROR))
85 | # std out prints everything below error (but always filters out trace)
86 | stdout_handler.setLevel(numeric_level)
87 | stdout_handler.addFilter(MaxLevelFilter(logging.ERROR, False))
88 |
89 | # set a format which is simpler for console use
90 | formatter = logging.Formatter(
91 | "%(asctime)s %(levelname)-8s %(message)s ", datefmt="%m-%d %H:%M:%S"
92 | )
93 | stdout_handler.setFormatter(formatter)
94 | stderr_handler.setFormatter(formatter)
95 |
96 | # add the handlers to the root logger
97 | logging.getLogger().addHandler(stdout_handler)
98 | logging.getLogger().addHandler(stderr_handler)
99 | logging.getLogger().addHandler(log_handler)
100 | logging.getLogger().addHandler(trace_handler)
101 | # set logging level for root logger
102 | # always do debug for the log file, drop to trace if requested
103 | logging.getLogger().setLevel(min(numeric_level, logging.DEBUG))
104 |
--------------------------------------------------------------------------------
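
A short sketch, with hypothetical paths, of wiring up the custom TRACE level defined above; trace records go only to the .trace file, never to stdout:

import logging
from pathlib import Path

from gphotos_sync.Logging import setup_logging

setup_logging("TRACE", Path("gphotos.log"), Path("/photos/backup"))
log = logging.getLogger(__name__)
log.trace("raw API call details, trace file only")  # type: ignore
log.info("this goes to stdout and the log file")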
/src/gphotos_sync/Queries.py:
--------------------------------------------------------------------------------
1 | # coding: utf8
2 |
3 | # noinspection SqlWithoutWhere
4 | match = [
5 | """
6 | -- stage 0 - remove previous matches
7 | UPDATE LocalFiles
8 | set RemoteId = NULL ;
9 | """,
10 | """
11 | -- stage 1 - look for unique matches
12 | UPDATE LocalFiles
13 | set RemoteId = (SELECT RemoteId
14 | FROM SyncFiles
15 | WHERE LocalFiles.OriginalFileName == SyncFiles.OrigFileName
16 | AND (LocalFiles.Uid == SyncFiles.Uid AND
17 | LocalFiles.CreateDate = SyncFiles.CreateDate)
18 | -- 32 character ids are legitimate and unique
19 | OR (LocalFiles.Uid == SyncFiles.Uid AND
20 | length(LocalFiles.Uid) == 32)
21 | )
22 | WHERE LocalFiles.Uid notnull and LocalFiles.Uid != 'not_supported'
23 | ;
24 | """,
25 | """
26 | -- stage 2 - mop up entries that have no UID (this is a small enough
27 | -- population that filename + CreateDate is probably unique)
28 | with pre_match(RemoteId) as
29 | (SELECT RemoteId from LocalFiles where RemoteId notnull)
30 | UPDATE LocalFiles
31 | set RemoteId = (SELECT RemoteId
32 | FROM SyncFiles
33 | WHERE LocalFiles.OriginalFileName == SyncFiles.OrigFileName
34 | AND LocalFiles.CreateDate = SyncFiles.CreateDate
35 | AND SyncFiles.RemoteId NOT IN (select RemoteId from pre_match)
36 | )
37 | WHERE LocalFiles.RemoteId isnull
38 | ;
39 | """,
40 | """
41 | -- stage 3 FINAL - mop up on filename only
42 | with pre_match(RemoteId) as
43 | (SELECT RemoteId from LocalFiles where RemoteId notnull)
44 | UPDATE LocalFiles
45 | set RemoteId = (SELECT RemoteId
46 | FROM SyncFiles
47 | WHERE LocalFiles.OriginalFileName == SyncFiles.OrigFileName
48 | AND SyncFiles.RemoteId NOT IN (select RemoteId from pre_match)
49 | )
50 | WHERE LocalFiles.RemoteId isnull
51 | ;
52 | """,
53 | ]
54 |
55 | missing_files = """select * from LocalFiles where RemoteId isnull;"""
56 |
57 | pre_extra_files = """
58 | -- overwrite NULL RemoteIds or extra_files will get no matches
59 | update LocalFiles set RemoteId='not_found' where RemoteId isnull
60 | """
61 |
62 | extra_files = """
63 | select * from SyncFiles where RemoteId not in (select RemoteId from LocalFiles)
64 | -- and uid not in (select uid from LocalFiles where length(SyncFiles.Uid) = 32)
65 | ;
66 | """
67 |
68 | duplicate_files = """
69 | with matches(RemoteId) as (
70 | select RemoteId from LocalFiles
71 | GROUP BY LocalFiles.RemoteId
72 | HAVING COUNT(LocalFiles.RemoteId) > 1
73 | )
74 | SELECT *
75 | FROM LocalFiles
76 | JOIN matches
77 | WHERE LocalFiles.RemoteId = matches.RemoteId
78 | ;
79 | """
80 |
--------------------------------------------------------------------------------
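
The match stages above are designed to run in order against an open database; a minimal sketch of executing them with sqlite3, assuming a hypothetical database path:

import sqlite3

from gphotos_sync import Queries

conn = sqlite3.connect("gphotos.sqlite")  # hypothetical path
for stage in Queries.match:
    conn.executescript(stage)  # stages 0-3: clear, unique, UID-less, name-only
conn.commit()
print(conn.execute(Queries.missing_files).fetchall())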
/src/gphotos_sync/Settings.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from pathlib import Path
3 |
4 | from attr import dataclass
5 |
6 | """
7 | Defines a dataclass for passing all configuration information between
8 | the worker classes
9 | """
10 |
11 |
12 | @dataclass
13 | class Settings:
14 | """
15 | A Class to hold command line settings
16 | """
17 |
18 | start_date: datetime
19 | end_date: datetime
20 | use_start_date: bool
21 |
22 | photos_path: Path
23 | use_flat_path: bool
24 |
25 | albums_path: Path
26 | shared_albums_path: Path
27 | album_index: bool
28 | omit_album_date: bool
29 | album_invert: bool
30 | no_album_sorting: bool
31 | album: str
32 | album_regex: str
33 | shared_albums: bool
34 |
35 | favourites_only: bool
36 | include_video: bool
37 | archived: bool
38 | use_hardlinks: bool
39 |
40 | retry_download: bool
41 | rescan: bool
42 | max_retries: int
43 | max_threads: int
44 | case_insensitive_fs: bool
45 | progress: bool
46 |
47 | ntfs_override: bool
48 |
49 | month_format: str
50 | path_format: str
51 |
52 | image_timeout: int
53 | video_timeout: int
54 |
--------------------------------------------------------------------------------
/src/gphotos_sync/Utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | from datetime import datetime
4 | from os import utime
5 | from pathlib import Path
6 | from sqlite3 import Timestamp
7 | from tempfile import NamedTemporaryFile
8 | from typing import Optional
9 |
10 | # TODO tidy this into a class (combine with Checks?)
11 |
12 | log = logging.getLogger(__name__)
13 |
14 | DATE_NORMALIZE = re.compile(r"(\d\d\d\d).(\d\d).(\d\d).(\d\d).(\d\d).(\d\d)")
15 | SHORT_DATE_NORMALIZE = re.compile(r"(\d\d\d\d).(\d\d).(\d\d)")
16 | PatType = type(DATE_NORMALIZE)
17 | DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
18 | DATE_ONLY = "%Y-%m-%d"
19 | MINIMUM_DATE = datetime(year=1900, month=1, day=1)
20 |
21 |
22 | # incredibly, Windows cannot handle dates before 1980
23 | def safe_str_time(date_time: datetime, date_format: str) -> str:
24 | global MINIMUM_DATE
25 | if date_time < MINIMUM_DATE:
26 | date_time = MINIMUM_DATE
27 | return date_time.strftime(date_format)
28 |
29 |
30 | def safe_timestamp(d: datetime) -> Timestamp:
31 | global MINIMUM_DATE
32 | if d < MINIMUM_DATE:
33 | d = MINIMUM_DATE
34 | return d
35 |
36 |
37 | def date_to_string(date_t: datetime):
38 | return date_t.strftime(DATE_FORMAT)
39 |
40 |
41 | def maximum_date() -> datetime:
42 | return datetime.max
43 |
44 |
45 | def minimum_date(root_folder: Path) -> datetime:
46 | global MINIMUM_DATE
47 |
48 | with NamedTemporaryFile(dir=str(root_folder)) as t:
49 | # determine the minimum date that is usable on the
50 | # target filesystem (is there a better way to do this?)
51 | # '1971', '1981' here are a bit of a hack - really we need to use
52 | # UTC correctly throughout this project - but for that to work well
53 | # we would need all cameras to be well behaved WRT timezones.
54 | min_dates = (1800, 1900, 1970, 1971, 1980, 1981)
55 |
56 | for min_date in min_dates:
57 | try:
58 | d = datetime.min.replace(year=min_date)
59 | utime(t.name, (d.timestamp(), d.timestamp()))
60 | except (ValueError, OverflowError, OSError):
61 | d = None; continue  # this candidate failed, try the next
62 | break
63 |
64 | if not d:
65 | raise ValueError("cannot set file modification date")
66 | MINIMUM_DATE = d
67 | log.debug("MINIMUM_DATE = %s" % MINIMUM_DATE)
68 | return MINIMUM_DATE
69 |
70 |
71 | def date_string_normalize(
72 | date_in: str, pattern_in: PatType, pattern_out: str # type: ignore
73 | ) -> Optional[datetime]:
74 | result = None
75 | matches = pattern_in.match(date_in) # type: ignore
76 | if matches:
77 | normalized = pattern_out.format(*matches.groups())
78 | result = datetime.strptime(normalized, DATE_FORMAT)
79 | return result
80 |
81 |
82 | def string_to_date(date_string: str) -> Optional[datetime]:
83 | result = None
84 | if date_string:
85 | result = date_string_normalize(date_string, DATE_NORMALIZE, "{}-{}-{} {}:{}:{}")
86 | if result is None:
87 | result = date_string_normalize(
88 | date_string, SHORT_DATE_NORMALIZE, "{}-{}-{} 00:00:00"
89 | )
90 | if result is None:
91 | log.warning("WARNING: time string %s illegal", date_string)
92 |
93 | return result
94 |
--------------------------------------------------------------------------------
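
string_to_date accepts any single-character separators thanks to the '.' wildcards in the patterns above; for example:

from gphotos_sync import Utils

print(Utils.string_to_date("2018:09:08 13:27:33"))  # 2018-09-08 13:27:33 (EXIF style)
print(Utils.string_to_date("2018-09-08"))           # 2018-09-08 00:00:00 (date only)
print(Utils.string_to_date("not a date"))           # None, after a logged warning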
/src/gphotos_sync/__init__.py:
--------------------------------------------------------------------------------
1 | from importlib.metadata import version # noqa
2 |
3 | __version__ = version("gphotos-sync")
4 | del version
5 |
6 | __all__ = ["__version__"]
7 |
--------------------------------------------------------------------------------
/src/gphotos_sync/authorize.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import timezone
3 | from json import JSONDecodeError, dump, load
4 | from pathlib import Path
5 | from typing import List, Optional
6 |
7 | from google_auth_oauthlib.flow import InstalledAppFlow
8 | from requests.adapters import HTTPAdapter
9 | from requests_oauthlib import OAuth2Session
10 | from urllib3.util.retry import Retry
11 |
12 | log = logging.getLogger(__name__)
13 |
14 |
15 | # OAuth endpoints given in the Google API documentation
16 | authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
17 | token_uri = "https://www.googleapis.com/oauth2/v4/token"
18 |
19 |
20 | class Authorize:
21 | def __init__(
22 | self,
23 | scope: List[str],
24 | token_file: Path,
25 | secrets_file: Path,
26 | max_retries: int = 5,
27 | port: int = 8080,
28 | ):
29 | """A very simple class to handle Google API authorization flow
30 | for the requests library. Includes saving the token and automatic
31 | token refresh.
32 |
33 | Args:
34 | scope: list of the scopes for which permission will be granted
35 | token_file: full path of a file in which the user token will be
36 | placed. After first use the previous token will also be read in from
37 | this file
38 | secrets_file: full path of the client secrets file obtained from
39 | Google Api Console
40 | """
41 | self.max_retries = max_retries
42 | self.scope: List[str] = scope
43 | self.token_file: Path = token_file
44 | self.session = None
45 | self.token = None
46 | self.secrets_file = secrets_file
47 | self.port = port
48 |
49 | try:
50 | with secrets_file.open("r") as stream:
51 | all_json = load(stream)
52 | secrets = all_json["installed"]
53 | self.client_id = secrets["client_id"]
54 | self.client_secret = secrets["client_secret"]
55 | self.redirect_uri = secrets["redirect_uris"][0]
56 | self.token_uri = secrets["token_uri"]
57 | self.extra = {
58 | "client_id": self.client_id,
59 | "client_secret": self.client_secret,
60 | }
61 |
62 | except (JSONDecodeError, IOError):
63 | print("missing or bad secrets file: {}".format(secrets_file))
64 | exit(1)
65 |
66 | def load_token(self) -> Optional[str]:
67 | try:
68 | with self.token_file.open("r") as stream:
69 | token = load(stream)
70 | except (JSONDecodeError, IOError):
71 | return None
72 | return token
73 |
74 | def save_token(self, token: str):
75 | with self.token_file.open("w") as stream:
76 | dump(token, stream)
77 | try:
78 | self.token_file.chmod(0o600)
79 | except (PermissionError,):
80 | log.warning("Could not change permissions of the token file")
81 |
82 | def authorize(self):
83 | """Initiates OAuth2 authentication and authorization flow"""
84 | token = self.load_token()
85 |
86 | if token:
87 | self.session = OAuth2Session(
88 | self.client_id,
89 | token=token,
90 | auto_refresh_url=self.token_uri,
91 | auto_refresh_kwargs=self.extra,
92 | token_updater=self.save_token,
93 | )
94 | else:
95 | flow = InstalledAppFlow.from_client_secrets_file(
96 | self.secrets_file, scopes=self.scope
97 | )
98 | # binding to 0.0.0.0 means the localhost redirect works even in a container.
99 | flow.run_local_server(
100 | open_browser=False, bind_addr="0.0.0.0", port=self.port
101 | )
102 |
103 | self.session = flow.authorized_session()
104 |
105 | # Mapping for backward compatibility
106 | oauth2_token = {
107 | "access_token": flow.credentials.token,
108 | "refresh_token": flow.credentials.refresh_token,
109 | "token_type": "Bearer",
110 | "scope": flow.credentials.scopes,
111 | "expires_at": flow.credentials.expiry.replace(
112 | tzinfo=timezone.utc
113 | ).timestamp(),
114 | }
115 |
116 | self.save_token(oauth2_token)
117 |
118 | # set up the retry behaviour for the authorized session
119 | retries = Retry(
120 | total=self.max_retries,
121 | backoff_factor=5,
122 | status_forcelist=[500, 502, 503, 504, 429],
123 | allowed_methods=frozenset(["GET", "POST"]),
124 | raise_on_status=False,
125 | respect_retry_after_header=True,
126 | )
127 | # apply the retry behaviour to our session by replacing the default HTTPAdapter
128 | self.session.mount("https://", HTTPAdapter(max_retries=retries))
129 |
--------------------------------------------------------------------------------
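
A minimal sketch of driving the Authorize class, assuming the read-only Photos Library scope and token/secrets file names like those used by the tests:

from pathlib import Path

from gphotos_sync.authorize import Authorize

scope = ["https://www.googleapis.com/auth/photoslibrary.readonly"]
auth = Authorize(scope, Path(".gphotos.token"), Path("client_secret.json"))
auth.authorize()        # loads a saved token or runs the interactive flow
session = auth.session  # a requests session with auto-refresh and retries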
/src/gphotos_sync/restclient.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from json import dumps
3 | from typing import Any, Dict, List, Union
4 |
5 | from requests import Session
6 | from requests.exceptions import HTTPError
7 |
8 | JSONValue = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
9 | JSONType = Union[Dict[str, JSONValue], List[JSONValue]]
10 |
11 | log = logging.getLogger(__name__)
12 |
13 | """
14 | Defines very simple classes to create a callable interface to a REST api
15 | from a discovery REST description document.
16 |
17 | Intended as a super simple replacement for google-api-python-client, using
18 | requests instead of httplib2
19 |
20 | giles 2018
21 | """
22 |
23 |
24 | # a dummy decorator to suppress unresolved references on this dynamic class
25 | def dynamic_attrs(cls):
26 | return cls
27 |
28 |
29 | @dynamic_attrs
30 | class RestClient:
31 | """
32 | To create a callable client to a REST API, instantiate this class.
33 | For details of the discovery API see:
34 | https://developers.google.com/discovery/v1/using
35 | """
36 |
37 | def __init__(self, api_url: str, auth_session: Session):
38 | """
39 | Create a rest API object tree from an api description
40 | """
41 | self.auth_session: Session = auth_session
42 | service_document = self.auth_session.get(api_url).json()
43 | self.json: JSONType = service_document
44 | self.base_url: str = str(service_document["baseUrl"])
45 | for c_name, collection in service_document["resources"].items():
46 | new_collection = Collection(c_name)
47 | setattr(self, c_name, new_collection)
48 | for m_name, method in collection["methods"].items():
49 | new_method = Method(self, **method)
50 | setattr(new_collection, m_name, new_method)
51 |
52 |
53 | # pylint: disable=no-member
54 | class Method:
55 | """ Represents a method in the REST API. To be called using its execute
56 | method, the execute method takes a single parameter for body and then
57 | named parameters for Http Request parameters.
58 |
59 | e.g.
60 | api = RestClient('https://photoslibrary.googleapis.com/$discovery'
61 | '/rest?version=v1', authenticated_session)
62 | api.albums.list.execute(pageSize=50)
63 | """
64 |
65 | def __init__(self, service: RestClient, **k_args: Dict[str, str]):
66 | self.path: str = ""
67 | self.httpMethod: str = ""
68 | self.service: RestClient = service
69 | self.__dict__.update(k_args)
70 | self.path_args: List[str] = []
71 | self.query_args: List[str] = []
72 | if hasattr(self, "parameters"):
73 | for key, value in self.parameters.items(): # type: ignore
74 | if value["location"] == "path":
75 | self.path_args.append(key)
76 | else:
77 | self.query_args.append(key)
78 |
79 | def execute(self, body: str = "", **k_args: Dict[str, str]):
80 | """executes the remote REST call for this Method"""
81 | path_args: Dict[str, Dict] = {
82 | k: k_args[k] for k in self.path_args if k in k_args
83 | }
84 | query_args: Dict[str, Dict] = {
85 | k: k_args[k] for k in self.query_args if k in k_args
86 | }
87 | path: str = self.service.base_url + self.make_path(path_args)
88 | if body:
89 | body = dumps(body)
90 |
91 | log.trace( # type: ignore
92 | "\nREQUEST: %s to %s params=%s\n%s",
93 | self.httpMethod,
94 | path,
95 | query_args,
96 | body,
97 | )
98 | result = self.service.auth_session.request(
99 | self.httpMethod, data=body, url=path, timeout=10, params=query_args
100 | )
101 | log.trace( # type: ignore
102 | "\nRESPONSE: %s\n%s", result.status_code, str(result.content)
103 | )
104 |
105 | try:
106 | result.raise_for_status()
107 | except HTTPError:
108 | log.error(
109 | "Request failed with status {}: {}".format(
110 | result.status_code, str(result.content)
111 | )
112 | )
113 | raise
114 | return result
115 |
116 | def make_path(self, path_args: Dict[str, Any]) -> str:
117 | """Extracts the arguments from path_args and inserts them into
118 | the URL template defined in self.path
119 |
120 | Returns:
121 | The URL with inserted parameters
122 | """
123 | result = str(self.path)
124 | path_params = []
125 | for key, value in path_args.items():
126 | path_param = "{{+{}}}".format(key)
127 | if path_param in result:
128 | result = result.replace("{{+{}}}".format(key), value)
129 | path_params.append(key)
130 | for key in path_params:
131 | path_args.pop(key)
132 | return result
133 |
134 |
135 | class Collection:
136 | """Used to represent a collection of methods
137 | e.g. Google Photos API - mediaItems"""
138 |
139 | def __init__(self, name: str):
140 | self.collection_name = name
141 |
--------------------------------------------------------------------------------
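
Putting authorize.py and restclient.py together, a sketch of the call pattern from the Method docstring; session here is assumed to be the authorized session obtained from Authorize above:

from gphotos_sync.restclient import RestClient

api = RestClient(
    "https://photoslibrary.googleapis.com/$discovery/rest?version=v1",
    session,
)
response = api.mediaItems.list.execute(pageSize=10)
for item in response.json().get("mediaItems", []):
    print(item["filename"])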
/src/gphotos_sync/sql/gphotos_create.sql:
--------------------------------------------------------------------------------
1 | drop table if exists Albums;
2 | create table Albums
3 | (
4 | RemoteId TEXT
5 | primary key,
6 | AlbumName TEXT,
7 | Size INT,
8 | Description TEXT,
9 | StartDate INT,
10 | EndDate INT,
11 | SyncDate INT,
12 | Downloaded INT DEFAULT 0,
13 | IsSharedAlbum BOOL
14 | )
15 | ;
16 | DROP INDEX IF EXISTS Albums_RemoteId_uindex;
17 | DROP INDEX IF EXISTS Albums_StartDate_index;
18 | DROP INDEX IF EXISTS Albums_AlbumName_index;
19 |
20 | create unique index Albums_RemoteId_uindex
21 | on Albums (RemoteId)
22 | ;
23 | create index Albums_AlbumName_index
24 | on Albums (AlbumName)
25 | ;
26 | create index Albums_StartDate_index
27 | on Albums (StartDate)
28 | ;
29 |
30 |
31 | drop table if exists LocalFiles;
32 | create table LocalFiles
33 | (
34 | Id INTEGER
35 | primary key,
36 | RemoteId TEXT default '',
37 | Uid Text,
38 | Path TEXT,
39 | FileName TEXT,
40 | OriginalFileName TEXT,
41 | DuplicateNo INT,
42 | MimeType TEXT,
43 | Description TEXT,
44 | FileSize INT,
45 | ModifyDate INT,
46 | CreateDate INT,
47 | SyncDate INT
48 | );
49 |
50 | DROP INDEX IF EXISTS LocalRemoteIdIdx;
51 | DROP INDEX IF EXISTS LocalUidIdx;
52 | DROP INDEX IF EXISTS LocalNameIdx;
53 | DROP INDEX IF EXISTS LocalCreatedIdx;
54 | DROP INDEX IF EXISTS LocalMatchIdx;
55 | DROP INDEX IF EXISTS LocalFiles_Path_FileName_DuplicateNo_uindex;
56 | create index LocalRemoteIdIdx on LocalFiles (RemoteId);
57 | create index LocalUidIdx on LocalFiles (Uid);
58 | create index LocalNameIdx on LocalFiles (FileName);
59 | create index LocalCreatedIdx on LocalFiles (CreateDate);
60 | create index LocalMatchIdx on LocalFiles (OriginalFileName, DuplicateNo, Description);
61 | create unique index LocalFiles_Path_FileName_DuplicateNo_uindex
62 | on LocalFiles (Path, FileName, DuplicateNo);
63 |
64 | drop table if exists SyncFiles;
65 | create table SyncFiles
66 | (
67 | Id INTEGER
68 | primary key,
69 | RemoteId TEXT,
70 | Uid Text,
71 | Url TEXT,
72 | Path TEXT,
73 | FileName TEXT,
74 | OrigFileName TEXT,
75 | DuplicateNo INT,
76 | MimeType TEXT,
77 | Description TEXT,
78 | FileSize INT,
79 | ModifyDate INT,
80 | CreateDate INT,
81 | SyncDate INT,
82 | Downloaded INT DEFAULT 0,
83 | Location TEXT
84 | );
85 |
86 | DROP INDEX IF EXISTS RemoteIdIdx;
87 | DROP INDEX IF EXISTS UidIdx;
88 | DROP INDEX IF EXISTS FileNameIdx;
89 | DROP INDEX IF EXISTS FileSizeIdx;
90 | DROP INDEX IF EXISTS FileSizeAndSizeIdx;
91 | DROP INDEX IF EXISTS CreatedIdx;
92 | DROP INDEX IF EXISTS ModifyDateIdx;
93 | DROP INDEX IF EXISTS SyncMatchIdx;
94 | DROP INDEX IF EXISTS SyncFiles_Path_FileName_DuplicateNo_uindex;
95 | create unique index RemoteIdIdx on SyncFiles (RemoteId);
96 | create index FileNameIdx on SyncFiles (FileName);
97 | create index UidIdx on SyncFiles (Uid);
98 | create index FileSizeIdx on SyncFiles (FileSize);
99 | create index FileSizeAndSizeIdx on SyncFiles (FileName, FileSize);
100 | create index CreatedIdx on SyncFiles (CreateDate);
101 | create index ModifyDateIdx on SyncFiles (ModifyDate);
102 | create index SyncMatchIdx on SyncFiles (OrigFileName, DuplicateNo, Description);
103 | create unique index SyncFiles_Path_FileName_DuplicateNo_uindex
104 | on SyncFiles (Path, FileName, DuplicateNo);
105 |
106 |
107 | drop table if exists AlbumFiles;
108 | create table AlbumFiles
109 | (
110 | Id INTEGER
111 | primary key,
112 | AlbumRec INT,
113 | DriveRec INT,
114 | Position INT,
115 | foreign key (AlbumRec) references Albums (RemoteId)
116 | on delete cascade,
117 | foreign key (DriveRec) references SyncFiles (Id)
118 | on update cascade on delete cascade)
119 | ;
120 | DROP INDEX IF EXISTS AlbumFiles_AlbumRec_DriveRec_uindex;
121 | create unique index AlbumFiles_AlbumRec_DriveRec_uindex
122 | on AlbumFiles (AlbumRec, DriveRec);
123 |
124 | drop table if exists Globals;
125 | CREATE TABLE Globals
126 | (
127 | Id INTEGER,
128 | Version TEXT,
129 | Albums INTEGER,
130 | Files INTEGER,
131 | LastIndex INT -- Date of last sync
132 | );
133 | CREATE UNIQUE INDEX Globals_Id_uindex ON Globals (Id);
134 |
135 |
136 |
--------------------------------------------------------------------------------
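
Taken together, the schema forms a small star: SyncFiles holds the indexed Google Photos items, LocalFiles holds the local disk scan, Albums holds album metadata, and AlbumFiles is the link table tying albums to sync files (AlbumRec references the TEXT key Albums.RemoteId, while DriveRec references the integer SyncFiles.Id). A minimal sqlite3 sketch of how the link table joins the other two; the schema path assumes we run from the repository root, and the inserted rows are made-up sample data:

import sqlite3
from pathlib import Path

# load the schema into an in-memory database and list an album's contents
SCHEMA = Path("src/gphotos_sync/sql/gphotos_create.sql")

con = sqlite3.connect(":memory:")
con.executescript(SCHEMA.read_text())

con.execute("INSERT INTO Albums (RemoteId, AlbumName) VALUES ('a1', 'Holiday')")
con.execute(
    "INSERT INTO SyncFiles (Id, RemoteId, FileName, Path) "
    "VALUES (1, 'm1', 'IMG_0001.jpg', 'photos/2020/01')"
)
con.execute("INSERT INTO AlbumFiles (AlbumRec, DriveRec, Position) VALUES ('a1', 1, 0)")

rows = con.execute(
    "SELECT A.AlbumName, S.Path, S.FileName FROM AlbumFiles AF "
    "JOIN Albums A ON A.RemoteId = AF.AlbumRec "
    "JOIN SyncFiles S ON S.Id = AF.DriveRec "
    "ORDER BY AF.Position"
).fetchall()
print(rows)  # [('Holiday', 'photos/2020/01', 'IMG_0001.jpg')]
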
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test-data/1987-JohnWoodAndGiles.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/1987-JohnWoodAndGiles.jpg
--------------------------------------------------------------------------------
/tests/test-data/20180126_185832.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/20180126_185832.jpg
--------------------------------------------------------------------------------
/tests/test-data/IMG_20180908_132733-gphotos.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/IMG_20180908_132733-gphotos.jpg
--------------------------------------------------------------------------------
/tests/test-data/IMG_20180908_132733-insync.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/IMG_20180908_132733-insync.jpg
--------------------------------------------------------------------------------
/tests/test-data/IMG_20190102_112832.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/IMG_20190102_112832.jpg
--------------------------------------------------------------------------------
/tests/test-data/PIC00002 (2).jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/PIC00002 (2).jpg
--------------------------------------------------------------------------------
/tests/test-data/PIC00002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test-data/PIC00002.jpg
--------------------------------------------------------------------------------
/tests/test_account.py:
--------------------------------------------------------------------------------
1 | from datetime import date
2 |
3 | """This file describes the contents of the test account photos library"""
4 |
5 |
6 | class TestAccount:
7 | latest_date = date(2020, 4, 26)
8 |
9 | image_years = [2020, 2019, 2017, 2016, 2015, 2014, 2001, 2000, 1998, 1965]
10 | # mostly 10 images in each of the years in the test data (1 in 2020, 0 in 2019)
11 | # plus 5 shared items in the 2017 shared album
12 | images_per_year = [1, 0, 10, 10, 10, 10, 10, 10, 10, 10]
13 | shared_images_per_year = [0, 0, 5, 0, 0, 0, 0, 0, 0, 0]
14 | shared_album_images_per_year = [0, 6, 0, 0, 0, 0, 0, 0, 0, 0]
15 | videos_per_year = [0, 0, 10, 0, 0, 0, 0, 0, 0, 0]
16 |
17 | image_count = sum(images_per_year)
18 | shared_image_count = sum(shared_images_per_year)
19 | video_count = sum(videos_per_year)
20 | total_count = image_count + video_count
21 |
22 | # shared test album has 'show in albums' so does appear in our albums list
23 | # 5 of its files are ours and 5 shared by the real giles knap
24 | album_names = [
25 | r"1001?Shared?Test?Album",
26 | r"0101?Album?2001",
27 | r"0528?Movies",
28 | r"0923?Clones😀",
29 | r"0926?Album?2016",
30 | r"1207?Same?Names",
31 | r"0426?Name?with?Comma",
32 | ]
33 | album_years = [2019, 2001, 2017, 2017, 2016, 2014, 2020]
34 | album_images = [5, 10, 10, 4, 16, 10, 1]
35 | album_shared_images = [5, 0, 0, 0, 0, 0, 0]
36 | album_count = len(album_names)
37 | album_image_count = sum(album_images)
38 | album_shared_image_count = sum(album_shared_images)
39 |
40 | shared_album_names = [r"0220?Noah?s?transformer?lego"]
41 | shared_album_images = [0]
42 | shared_album_shared_images = [6]
43 | shared_album_count = len(shared_album_names)
44 | shared_album_image_count = sum(shared_album_images)
45 | shared_album_shared_image_count = sum(shared_album_shared_images)
46 |
47 | # subset of items from 2016-01-01 to 2017-01-01 for quicker tests
48 | start = "2016-01-01"
49 | end = "2017-01-01"
50 | image_count_2016 = 10
51 | item_count_2017 = 20
52 | item_count_2020 = 1
53 |
--------------------------------------------------------------------------------
/tests/test_boilerplate_removed.py:
--------------------------------------------------------------------------------
1 | """
2 | This file checks that all the example boilerplate text has been removed.
3 | It can be deleted when all the contained tests pass
4 | """
5 |
6 | import sys
7 | from pathlib import Path
8 |
9 | if sys.version_info < (3, 8):
10 | from importlib_metadata import metadata # noqa
11 | else:
12 | from importlib.metadata import metadata # noqa
13 |
14 | ROOT = Path(__file__).parent.parent
15 |
16 |
17 | def skeleton_check(check: bool, text: str):
18 | if ROOT.name == "python3-pip-skeleton" or str(ROOT) == "/project":
19 | # In the skeleton module the check should fail
20 | check = not check
21 | text = f"Skeleton didn't raise: {text}"
22 | if check:
23 | raise AssertionError(text)
24 |
25 |
26 | def assert_not_contains_text(path: str, text: str, explanation: str):
27 | full_path = ROOT / path
28 | if full_path.exists():
29 | contents = full_path.read_text().replace("\n", " ")
30 | skeleton_check(text in contents, f"Please change ./{path} {explanation}")
31 |
32 |
33 | # pyproject.toml
34 | def test_module_summary():
35 | summary = metadata("gphotos-sync")["summary"]
36 | skeleton_check(
37 | "One line description of your module" in summary,
38 | "Please change project.description in ./pyproject.toml "
39 | "to be a one line description of your module",
40 | )
41 |
42 |
43 | # README
44 | def test_changed_README_intro():
45 | assert_not_contains_text(
46 | "README.rst",
47 | "This is where you should write a short paragraph",
48 | "to include an intro on what your module does",
49 | )
50 |
51 |
52 | def test_removed_adopt_skeleton():
53 | assert_not_contains_text(
54 | "README.rst",
55 | "This project contains template code only",
56 | "remove the note at the start",
57 | )
58 |
59 |
60 | def test_changed_README_body():
61 | assert_not_contains_text(
62 | "README.rst",
63 | "This is where you should put some images or code snippets",
64 | "to include some features and why people should use it",
65 | )
66 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import sys
3 |
4 | from gphotos_sync import __version__
5 |
6 |
7 | def test_cli_version():
8 | cmd = [sys.executable, "-m", "gphotos_sync", "--version"]
9 | assert subprocess.check_output(cmd).decode().strip() == __version__
10 |
--------------------------------------------------------------------------------
/tests/test_credentials/.gphotos.token:
--------------------------------------------------------------------------------
1 | {"access_token": "ya29.a0AfB_byAreGK4x3LmWMoEdyxynFhxmVdluIku4_wrxknJNMivmNheLMhz1UT2bya7Oq_sKzgljfBXgEvVl5ODSq6XRu53doa0zOzeKJiEbl9KbdqqTlbVy2r5HV_FJ2weHCzihyhDkUtcFduPSsLQNFffLjZubPLvTGDtdN0aCgYKAXISARMSFQHGX2MiqhOMZleHPcJE2ubZkU7jvw0174", "expires_in": 3599, "scope": ["https://www.googleapis.com/auth/photoslibrary.sharing", "https://www.googleapis.com/auth/photoslibrary.readonly"], "token_type": "Bearer", "expires_at": 1704573379.582039, "refresh_token": "1//03CEqAzsnP-8PCgYIARAAGAMSNwF-L9Irz4_ilhRw0HIwVImT4gTCUPlV8YaCTYQiIjD4juWOI5eQh_-Rzh9nTmBND0jliOnabq4"}
--------------------------------------------------------------------------------
/tests/test_credentials/client_secret.json:
--------------------------------------------------------------------------------
1 | {"installed":{"client_id":"922694661112-hso5f9cqnuafdqap2k9nkc1qbatlf5am.apps.googleusercontent.com","project_id":"gphotostwo","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}}
--------------------------------------------------------------------------------
/tests/test_full_library.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from pathlib import Path
3 | from unittest import TestCase
4 |
5 | import tests.test_setup as ts
6 | from gphotos_sync.LocalData import LocalData
7 | from tests.test_account import TestAccount
8 |
9 | photos_root = Path("photos")
10 | albums_root = Path("albums")
11 | comparison_root = Path("comparison")
12 |
13 |
14 | class TestSystem(TestCase):
15 | def test_sys_whole_library(self):
16 | """Download all images in the test library.
17 | Check the filesystem for the correct files.
18 | Check the DB for the correct entries.
19 | Note: if you select --skip-video then we use the search API
20 | instead of list.
21 | This then misses these 2 files:
22 | subaru1.jpg|photos/1998/10
23 | subaru2.jpg|photos/1998/10
24 | todo investigate the above
25 | """
26 | warnings.filterwarnings(
27 | action="ignore", message="unclosed", category=ResourceWarning
28 | )
29 | with ts.SetupDbAndCredentials() as s:
30 | s.test_setup("test_sys_whole_library", trash_files=True, trash_db=True)
31 | s.gp.main([str(s.root), "--skip-shared-albums", "--progress"])
32 |
33 | db = LocalData(s.root)
34 |
35 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
36 | count = db.cur.fetchone()
37 | t = (
38 | TestAccount.image_count
39 | + TestAccount.video_count
40 | + TestAccount.shared_image_count
41 | )
42 | self.assertEqual(
43 | t, count[0], "expected {} items excluding shared albums".format(t)
44 | )
45 |
46 | db.cur.execute("SELECT COUNT() FROM SyncFiles where MimeType like 'video%'")
47 | count = db.cur.fetchone()
48 | self.assertEqual(TestAccount.video_count, count[0])
49 |
50 | db.cur.execute("SELECT COUNT() FROM Albums;")
51 | count = db.cur.fetchone()
52 | t = TestAccount.album_count
53 | self.assertEqual(t, count[0], "expected {} total album count".format(t))
54 |
55 | for year, images, shared, videos in zip(
56 | TestAccount.image_years,
57 | TestAccount.images_per_year,
58 | TestAccount.shared_images_per_year,
59 | TestAccount.videos_per_year,
60 | ):
61 | # looking for .jpg .JPG .png .jfif
62 | pat = str(photos_root / str(year) / "*" / "*.[JjpP]*")
63 | self.assertEqual(
64 | images + shared,
65 | len(sorted(s.root.glob(pat))),
66 | "mismatch on image file count for year {}".format(year),
67 | )
68 | # looking for *.mp4
69 | pat = str(photos_root / str(year) / "*" / "*.mp4")
70 | self.assertEqual(
71 | videos,
72 | len(sorted(s.root.glob(pat))),
73 | "mismatch on video file count for year {}".format(year),
74 | )
75 |
76 | for idx, a in enumerate(TestAccount.album_names):
77 | pat = str(albums_root / "*" / a / "*")
78 | t = TestAccount.album_images[idx] + TestAccount.album_shared_images[idx]
79 | self.assertEqual(
80 | t,
81 | len(sorted(s.root.glob(pat))),
82 | "album {} does not contain {} images".format(
83 | a, TestAccount.album_images[idx]
84 | ),
85 | )
86 |
87 | # check that the most recent scanned file date was recorded
88 | d_date = db.get_scan_date()
89 | self.assertEqual(d_date.date(), TestAccount.latest_date)
90 |
91 | # check that re-running does not get any db constraint violations etc.
92 | # also test the comparison feature, by comparing the library with its
93 | # own gphotos-sync output
94 | s.__exit__()
95 | s.test_setup(
96 | "test_sys_whole_library", args=["--compare-folder", str(s.root)]
97 | )
98 | s.gp.start(s.parsed_args)
99 |
100 | # There is one pair of files that are copies of the same image with
101 | # same UID. This looks like one pair of duplicates and one extra file
102 | # in the comparison folder. (also the gphotos database etc appear
103 | # as missing files)
104 | pat = str(comparison_root / "missing_files" / "*")
105 | files = sorted(s.root.glob(pat))
106 | self.assertEqual(0, len(files), "expected 0 missing files")
107 | pat = str(comparison_root / "extra_files" / "*" / "*" / "*" / "*")
108 | files = sorted(s.root.glob(pat))
109 | self.assertEqual(0, len(files), "expected 0 extra files")
110 | pat = str(comparison_root / "duplicates" / "*")
111 | files = sorted(s.root.glob(pat))
112 | self.assertEqual(0, len(files), "expected 0 duplicate files")
113 |
--------------------------------------------------------------------------------
/tests/test_setup.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import shutil
3 | from pathlib import Path
4 |
5 | from appdirs import AppDirs
6 |
7 | from gphotos_sync import __main__
8 | from gphotos_sync.__main__ import GooglePhotosSyncMain
9 | from gphotos_sync.Checks import do_check
10 |
11 | # if we are debugging, the requests library is too noisy
12 | logging.getLogger("requests").setLevel(logging.WARNING)
13 | logging.getLogger("requests_oauthlib").setLevel(logging.WARNING)
14 | logging.getLogger("urllib3").setLevel(logging.WARNING)
15 | logging.basicConfig(
16 | level=logging.DEBUG,
17 | format="%(asctime)s %(name)-12s %(levelname)-8s " "%(message)s",
18 | datefmt="%m-%d %H:%M:%S",
19 | filemode="w",
20 | )
21 |
22 |
23 | class SetupDbAndCredentials:
24 | def __init__(self):
25 | # set up the test account credentials
26 | __main__.APP_NAME = "gphotos-sync-test"
27 | app_dirs = AppDirs(__main__.APP_NAME)
28 | self.test_folder = Path(__file__).absolute().parent / "test_credentials"
29 | user_data = Path(app_dirs.user_data_dir)
30 | if not user_data.exists():
31 | user_data.mkdir(parents=True)
32 | user_config = Path(app_dirs.user_config_dir)
33 | if not user_config.exists():
34 | user_config.mkdir(parents=True)
35 |
36 | secret_file = self.test_folder / "client_secret.json"
37 | shutil.copy(secret_file, app_dirs.user_config_dir)
38 |
39 | self.gp = GooglePhotosSyncMain()
40 | self.parsed_args = None
41 | self.db_file = None
42 | self.root = None
43 |
44 | def __enter__(self):
45 | return self
46 |
47 | def __exit__(self, exc_type=None, exc_value=None, traceback=None):
48 | self.gp.google_photos_down.close()
49 |
50 | def test_setup(self, test_name, args=None, trash_db=False, trash_files=False):
51 | self.root = Path("/tmp/gpTests/{}".format(test_name))
52 |
53 | self.db_file = self.root / "gphotos.sqlite"
54 | if trash_files:
55 | if self.root.exists():
56 | shutil.rmtree(self.root)
57 | elif trash_db:
58 | self.db_file.unlink()
59 | if not self.root.exists():
60 | self.root.mkdir(parents=True)
61 |
62 | do_check(self.root)
63 | # make retries big so that CI can get past
64 | # HTTPError: 429 Client Error: Too Many Requests for url
65 | # on the google API URLs (when re-running CI frequently)
66 | all_args = [str(self.root), "--log-level", "error", "--max-retries", "200"]
67 | if args:
68 | all_args += args
69 |
70 | credentials_file = self.test_folder / ".gphotos.token"
71 | shutil.copy(credentials_file, self.root)
72 |
73 | self.parsed_args = self.gp.parser.parse_args(all_args)
74 | self.parsed_args.root_folder = Path(self.parsed_args.root_folder)
75 | self.gp.setup(self.parsed_args, Path(self.root))
76 |
77 | def test_done(self):
78 | self.gp.data_store.store()
79 |
--------------------------------------------------------------------------------
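
SetupDbAndCredentials leans on appdirs to locate the per-user config directory into which client_secret.json is copied. For orientation, a quick sketch of what AppDirs resolves to (the commented paths are typical Linux values; other platforms differ):

from appdirs import AppDirs

dirs = AppDirs("gphotos-sync-test")
print(dirs.user_config_dir)  # e.g. ~/.config/gphotos-sync-test on Linux
print(dirs.user_data_dir)    # e.g. ~/.local/share/gphotos-sync-test on Linux
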
/tests/test_system/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test_system/__init__.py
--------------------------------------------------------------------------------
/tests/test_system/test_database.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from gphotos_sync.LocalData import LocalData
4 | from tests.test_setup import SetupDbAndCredentials
5 |
6 |
7 | class DatabaseTest(TestCase):
8 | def test_new_schema(self):
9 | """
10 | check that the database initialization errors if the version of the
11 | data store is newer than the code version
12 | UPDATE: use --favourites-only so that we do download a photo. A previous
13 | bug was only picked up when this replaced --skip-files"""
14 | with SetupDbAndCredentials() as s:
15 | # get a single file
16 | args = ["--favourites-only", "--skip-albums"]
17 | s.test_setup("new_schema", args=args, trash_files=True)
18 | s.gp.start(s.parsed_args)
19 |
20 | db = LocalData(s.root)
21 | db.cur.execute("UPDATE Globals SET Version = 1.0 WHERE Id IS 1")
22 | db.store()
23 | db.con.close()
24 |
25 | s.__exit__()
26 | s.test_setup("new_schema", args=args)
27 | s.gp.start(s.parsed_args)
28 |
29 | db = LocalData(s.root)
30 | db.cur.execute("SELECT Version From Globals WHERE Id IS 1")
31 | version = float(db.cur.fetchone()[0])
32 | self.assertEqual(version, LocalData.VERSION)
33 |
34 | db.cur.execute("UPDATE Globals SET Version = 100.0 WHERE Id IS 1")
35 | db.store()
36 |
37 | with self.assertRaises(ValueError):
38 | s.__exit__()
39 | s.test_setup("new_schema", args=args)
40 |
--------------------------------------------------------------------------------
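
The test above depends on a guard in LocalData that compares the Version stored in Globals with the code's own LocalData.VERSION and raises ValueError when the on-disk store is newer. A minimal sketch of that kind of guard, assuming a hypothetical CODE_VERSION (an illustration, not the project's actual LocalData code):

import sqlite3

CODE_VERSION = 5.6  # hypothetical code-side schema version for illustration

def check_schema_version(con: sqlite3.Connection) -> None:
    """Refuse to open a data store written by a newer code version."""
    row = con.execute("SELECT Version FROM Globals WHERE Id IS 1").fetchone()
    db_version = float(row[0])
    if db_version > CODE_VERSION:
        raise ValueError(
            "database version {} is newer than code version {}".format(
                db_version, CODE_VERSION
            )
        )
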
/tests/test_system/test_network.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from pathlib import Path
3 | from unittest import TestCase
4 | from unittest.mock import Mock, patch
5 |
6 | from requests import Session
7 | from requests.exceptions import HTTPError
8 |
9 | import tests.test_setup as ts
10 | from gphotos_sync.LocalData import LocalData
11 | from tests.test_account import TestAccount
12 |
13 | photos_root = Path("photos")
14 | original_get = Session.get
15 | call_count = 0
16 |
17 |
18 | def patched_get(self, url, stream=True, timeout=20):
19 | global call_count
20 | call_count += 1
21 | # succeed occasionally only
22 | succeed = call_count % 10 == 0
23 | if "discovery" in url or succeed:
24 | return original_get(self, url, stream=stream, timeout=timeout)
25 | else:
26 | raise HTTPError(Mock(status=500), "ouch!")
27 |
28 |
29 | class TestNetwork(TestCase):
30 | @patch.object(Session, "get", patched_get)
31 | def test_max_retries_hit(self):
32 | warnings.filterwarnings(
33 | action="ignore", message="unclosed", category=ResourceWarning
34 | )
35 |
36 | with ts.SetupDbAndCredentials() as s:
37 | args = ["--skip-albums"]
38 | s.test_setup(
39 | "test_max_retries_hit", args=args, trash_files=True, trash_db=True
40 | )
41 | s.gp.start(s.parsed_args)
42 |
43 | db = LocalData(s.root)
44 |
45 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
46 | count = db.cur.fetchone()
47 | self.assertEqual(TestAccount.total_count, count[0])
48 |
49 | pat = str(photos_root / "*" / "*" / "*")
50 | self.assertEqual(
51 | 9, len(sorted(s.root.glob(pat))), "mismatch on image file count"
52 | )
53 |
--------------------------------------------------------------------------------
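
patched_get above swaps Session.get at the class level, so every Session created inside the code under test flows through the fault injector while self is still passed as normal. A minimal sketch of the same technique against a toy class (names are illustrative):

from unittest.mock import patch

class Client:
    def get(self, url: str) -> str:
        return "real response for " + url

def flaky_get(self, url: str) -> str:
    # unbound replacement: 'self' is the Client instance, just as in patched_get
    raise ConnectionError("injected failure for " + url)

with patch.object(Client, "get", flaky_get):
    try:
        Client().get("https://example.com")
    except ConnectionError as e:
        print(e)  # injected failure for https://example.com
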
/tests/test_system/test_regression.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import stat
4 | from unittest import TestCase
5 | from unittest.mock import PropertyMock, patch
6 |
7 | import tests.test_setup as ts
8 | from gphotos_sync.GooglePhotosIndex import GooglePhotosIndex
9 | from gphotos_sync.LocalData import LocalData
10 | from tests.test_account import TestAccount
11 |
12 |
13 | class TestSystem(TestCase):
14 | def test_no_album_index(self):
15 | """for issue #89 - photos directly uploaded into albums dont 'list'"""
16 | with ts.SetupDbAndCredentials() as s:
17 | args = ["--no-album-index", "--skip-shared-albums", "--index-only"]
18 | s.test_setup(
19 | "test_no_album_index", trash_files=True, trash_db=True, args=args
20 | )
21 | s.gp.start(s.parsed_args)
22 |
23 | db = LocalData(s.root)
24 |
25 | # There are 95 items but 10 were uploaded directly into a folder
26 | # so --no-album-index may affect them (but does not)
27 | # Also 5 are shared from another account (skipped due to
28 | # --skip-shared-albums AND --no-album-index)
29 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
30 | count = db.cur.fetchone()
31 | # this was an attempt to prove that creating a folder and uploading
32 | # directly to it in google photos web would reproduce
33 | # https://github.com/gilesknap/gphotos-sync/issues/89
34 | # if it had done so then we would only get 80 files
35 | t = TestAccount.image_count + TestAccount.video_count
36 | self.assertEqual(
37 | t, count[0], "expected {} files with album index off".format(t)
38 | )
39 |
40 | @patch.object(GooglePhotosIndex, "PAGE_SIZE", new_callable=PropertyMock)
41 | def test_zero_items_in_response(self, page_size):
42 | """
43 | for issue https://github.com/gilesknap/gphotos-sync/issues/112
44 | """
45 | # note this fails with page size below 5 and that might be another API
46 | # bug
47 | # to emulate issue #112 remove the date range and set page_size = 2
48 | # this then does download everything via media_items.list but sometimes
49 | # gets zero items with a next_page token (too expensive on quota to
50 | # always leave it like this.)
51 | page_size.return_value = 6
52 |
53 | with ts.SetupDbAndCredentials() as s:
54 | args = [
55 | "--skip-albums",
56 | "--index-only",
57 | "--start-date",
58 | "1965-01-01",
59 | "--end-date",
60 | "1965-12-31",
61 | ]
62 | s.test_setup(
63 | "test_zero_items_in_response",
64 | trash_files=True,
65 | trash_db=True,
66 | args=args,
67 | )
68 | s.gp.start(s.parsed_args)
69 |
70 | db = LocalData(s.root)
71 |
72 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
73 | count = db.cur.fetchone()
74 | self.assertEqual(10, count[0], "expected 10 images 1965")
75 |
76 | # this test does not work on windows - it does not throw an error so it
77 | # seems chmod fails to have an effect
78 | def ___test_folder_not_writeable(self):
79 | # make sure we get permissions error and not 'database is locked'
80 | s = ts.SetupDbAndCredentials()
81 | s.test_setup("test_folder_not_writeable", trash_files=True, trash_db=True)
82 | try:
83 | if os.name == "nt":
84 | os.chmod(str(s.root), stat.S_IREAD)
85 | else:
86 | s.root.chmod(0o444)
87 | with self.assertRaises(PermissionError):
88 | s.gp.main([str(s.root), "--skip-shared-albums"])
89 | finally:
90 | if os.name == "nt":
91 | os.chmod(str(s.root), stat.S_IWRITE | stat.S_IREAD)
92 | else:
93 | os.chmod(str(s.root), 0o777)
94 | shutil.rmtree(str(s.root))
95 |
--------------------------------------------------------------------------------
/tests/test_system/test_requests.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 | from unittest import TestCase
4 |
5 | from requests import Session, exceptions
6 | from requests.adapters import HTTPAdapter
7 | from urllib3.util.retry import Retry
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
12 | class TestRequests(TestCase):
13 | """
14 | (Not testing code in this project)
15 | This is just testing my understanding of retry and exceptions in
16 | requests / urllib3. It was not easy to work out what exceptions to
17 | expect so I'm keeping this code as a reminder.
18 | """
19 |
20 | def test_retries_500(self):
21 | retries = 5
22 | timeout = 2
23 |
24 | session = Session()
25 | start = datetime.now()
26 | result = session.get("https://httpbin.org/status/500", timeout=timeout)
27 | self.assertEqual(result.status_code, 500)
28 | elapsed = datetime.now() - start
29 |
30 | retry = Retry(
31 | total=retries,
32 | backoff_factor=0.1,
33 | status_forcelist=[500, 502, 503, 504],
34 | allowed_methods=frozenset(["GET", "POST"]),
35 | raise_on_status=False,
36 | respect_retry_after_header=True,
37 | )
38 |
39 | session.close()
40 | session.mount("https://", HTTPAdapter(max_retries=retry))
41 |
42 | start = datetime.now()
43 | result = session.get("https://httpbin.org/status/500", timeout=timeout)
44 | elapsed2 = datetime.now() - start
45 | self.assertEqual(result.status_code, 500)
46 | self.assertGreater(elapsed2, elapsed * (retries - 1))
47 | session.close()
48 |
49 | def test_retries_timeout(self):
50 | retries = 3
51 | timeout = 1
52 | retry_error = False
53 |
54 | session = Session()
55 | retry = Retry(
56 | total=retries,
57 | backoff_factor=0.1,
58 | status_forcelist=[500, 502, 503, 504],
59 | allowed_methods=frozenset(["GET", "POST"]),
60 | raise_on_status=False,
61 | respect_retry_after_header=True,
62 | )
63 |
64 | session.mount("https://", HTTPAdapter(max_retries=retry))
65 |
66 | start = datetime.now()
67 | try:
68 | _ = session.get("https://httpbin.org/delay/5", timeout=timeout)
69 | except exceptions.ConnectionError as e:
70 | retry_error = True
71 | print(e)
72 |
73 | elapsed = datetime.now() - start
74 | self.assertEqual(retry_error, True)
75 | self.assertGreater(elapsed.seconds, retries * timeout)
76 |
--------------------------------------------------------------------------------
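
For reference, urllib3's Retry sleeps roughly backoff_factor * 2**(n - 1) before the n-th retry, capped at Retry's backoff maximum (older urllib3 releases also skip the sleep before the first retry). A quick sketch of the cumulative delay this adds, which is why the retried request in test_retries_500 takes so much longer than the plain one:

def total_backoff(retries: int, backoff_factor: float, cap: float = 120.0) -> float:
    """Approximate total sleep added by urllib3 Retry across all retries."""
    return sum(min(cap, backoff_factor * 2 ** (n - 1)) for n in range(1, retries + 1))

# with the values used in test_retries_500 above:
print(total_backoff(5, 0.1))  # 0.1 + 0.2 + 0.4 + 0.8 + 1.6 = 3.1 seconds
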
/tests/test_units/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gilesknap/gphotos-sync/3fb450789afc48d2461dc2fd1fa42197e856abee/tests/test_units/__init__.py
--------------------------------------------------------------------------------
/tests/test_units/test_errors.py:
--------------------------------------------------------------------------------
1 | # type: ignore
2 | import os
3 | from pathlib import Path
4 | from unittest import TestCase
5 |
6 | import pytest
7 | from mock import PropertyMock, patch
8 |
9 | import gphotos_sync.authorize as auth
10 | import tests.test_setup as ts
11 | from gphotos_sync.BaseMedia import BaseMedia
12 | from gphotos_sync.Checks import do_check
13 | from gphotos_sync.DatabaseMedia import DatabaseMedia
14 | from gphotos_sync.DbRow import DbRow
15 | from gphotos_sync.GoogleAlbumMedia import GoogleAlbumMedia
16 | from gphotos_sync.GoogleAlbumsRow import GoogleAlbumsRow
17 | from gphotos_sync.LocalData import LocalData
18 |
19 | photos_root = Path("photos")
20 | albums_root = Path("albums")
21 | comparison_root = Path("comparison")
22 |
23 | # todo I'm using assert here and hence do not really need to use TestCase classes.
24 | # I should probably switch to a consistent approach as per the new Diamond
25 | # guidelines, which is pytest only with no unittest classes (throughout all test files)
26 |
27 |
28 | class TestErrors(TestCase):
29 | """
30 | Tests to cover failure paths, to complete test coverage.
31 | Also used to cover other unusual paths such as:
32 | Windows OS
33 | pure virtual classes
34 | etc.
35 | """
36 |
37 | @patch(
38 | "gphotos_sync.authorize.InstalledAppFlow.run_local_server",
39 | return_value="dummy_response_string",
40 | )
41 | @patch(
42 | "gphotos_sync.authorize.InstalledAppFlow.authorized_session",
43 | return_value="dummy_seaaion",
44 | )
45 | def test_authorize(self, authorized_session, local_server):
46 | scope = [
47 | "https://www.googleapis.com/auth/photoslibrary.readonly",
48 | "https://www.googleapis.com/auth/photoslibrary.sharing",
49 | ]
50 |
51 | bad_file: Path = (
52 | Path(__file__).absolute().parent.parent
53 | / "test_credentials"
54 | / ".no-token-here"
55 | )
56 | secrets_file: Path = (
57 | Path(__file__).absolute().parent.parent
58 | / "test_credentials"
59 | / "client_secret.json"
60 | )
61 | # test_data: Path = Path(__file__).absolute().parent.parent / 'test-data'
62 | # token_file: Path = Path(__file__).absolute().parent.parent / \
63 | # 'test_credentials' / '.gphotos.token'
64 |
65 | if bad_file.exists():
66 | bad_file.unlink()
67 | with pytest.raises(SystemExit) as test_wrapped_e:
68 | a = auth.Authorize(scope, bad_file, bad_file)
69 | assert test_wrapped_e.type is SystemExit
70 |
71 | a = auth.Authorize(scope, bad_file, secrets_file)
72 | res = a.load_token()
73 | assert res is None
74 |
75 | def test_base_media(self):
76 | """Download archived images in test library using flat folders (and
77 | windows file name restrictions)
78 | """
79 | b = BaseMedia()
80 |
81 | with pytest.raises(NotImplementedError):
82 | x = b.size
83 |
84 | with pytest.raises(NotImplementedError):
85 | x = b.id
86 |
87 | with pytest.raises(NotImplementedError):
88 | x = b.description
89 |
90 | with pytest.raises(NotImplementedError):
91 | x = b.orig_name
92 |
93 | with pytest.raises(NotImplementedError):
94 | x = b.create_date
95 |
96 | with pytest.raises(NotImplementedError):
97 | x = b.modify_date
98 |
99 | with pytest.raises(NotImplementedError):
100 | x = b.mime_type
101 |
102 | with pytest.raises(NotImplementedError):
103 | x = b.url
104 | print(x) # for pylint
105 |
106 | with ts.SetupDbAndCredentials() as s:
107 | args = [
108 | "--skip-albums",
109 | "--start-date",
110 | "2020-01-01",
111 | "--use-flat-path",
112 | ]
113 | s.test_setup("test_base_media", args=args, trash_files=True, trash_db=True)
114 | s.gp.start(s.parsed_args)
115 |
116 | db = LocalData(s.root)
117 |
118 | # expect exactly 1 media item (only one item dates from 2020)
119 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
120 | count = db.cur.fetchone()
121 | self.assertEqual(1, count[0])
122 |
123 | pat = str(photos_root / "2020-04" / "*.*")
124 | files = sorted(s.root.glob(pat))
125 | self.assertEqual(1, len(files))
126 |
127 | @staticmethod
128 | def test_checks():
129 | a_path = Path("/tmp")
130 | c = do_check(a_path)
131 | assert c.is_linux
132 |
133 | with patch(
134 | "gphotos_sync.Checks.Path.symlink_to", side_effect=FileNotFoundError()
135 | ):
136 | assert not c._symlinks_supported()
137 |
138 | with patch("gphotos_sync.Checks.Path.unlink", side_effect=FileNotFoundError()):
139 | assert not c._check_case_sensitive()
140 |
141 | with patch("gphotos_sync.Checks.Path.glob", return_value=["a"]):
142 | assert not c._check_case_sensitive()
143 |
144 | with patch(
145 | "gphotos_sync.Checks.subprocess.check_output", side_effect=BaseException()
146 | ):
147 | assert c._get_max_path_length() == 248
148 |
149 | if os.name != "nt":
150 | with patch("gphotos_sync.Checks.os.statvfs", side_effect=BaseException()):
151 | assert c._get_max_filename_length() == 248
152 |
153 | with patch("gphotos_sync.Checks.Path.touch", side_effect=BaseException()):
154 | assert not c._unicode_filenames()
155 |
156 | @staticmethod
157 | def test_database_media():
158 | d = DatabaseMedia()
159 |
160 | assert d.url == ""
161 | assert d.location == ""
162 |
163 | @staticmethod
164 | def test_db_row():
165 | d = DbRow(None)
166 | b = BaseMedia()
167 |
168 | with pytest.raises(NotImplementedError):
169 | x = d.to_media()
170 |
171 | with pytest.raises(NotImplementedError):
172 | x = d.from_media(b)
173 |
174 | with pytest.raises(ValueError):
175 | x = d.make(bad_column=1)
176 | print(x) # for pylint
177 |
178 | if d:
179 | assert False, "an empty DbRow should evaluate to False"
180 |
181 | @staticmethod
182 | def test_google_albums_media():
183 | m = GoogleAlbumMedia("")
184 | g = GoogleAlbumsRow(None)
185 | g.from_media(m)
186 |
187 | def download_faves(self, expected=4, no_response=False, trash=True):
188 | # Download favourite images only in test library.
189 | with ts.SetupDbAndCredentials() as s:
190 | args = [
191 | "--album",
192 | "Clones😀",
193 | "--use-flat-path",
194 | "--omit-album-date",
195 | "--rescan",
196 | ]
197 | s.test_setup(
198 | "test_google_albums_sync", args=args, trash_files=trash, trash_db=trash
199 | )
200 | s.gp.start(s.parsed_args)
201 |
202 | with LocalData(s.root) as db:
203 | # check the expected number of media items
204 | db.cur.execute("SELECT COUNT() FROM SyncFiles")
205 | count = db.cur.fetchone()
206 | self.assertEqual(expected, count[0])
207 |
208 | class DummyResponse:
209 | @staticmethod
210 | def json():
211 | return {}
212 |
213 | @patch(
214 | "gphotos_sync.GoogleAlbumsSync.PAGE_SIZE",
215 | new_callable=PropertyMock(return_value=1),
216 | )
217 | @patch(
218 | "gphotos_sync.GoogleAlbumsSync.ALBUM_ITEMS",
219 | new_callable=PropertyMock(return_value=1),
220 | )
221 | def test_google_albums_sync(self, page_size, album_items):
222 | # next page in responses (set pageSize = 1) fetch_album_contents()
223 | # blank response.json (empty album - add to test data?)
224 | # also pagesize = 1 in index_albums_type()
225 | # self._omit_album_date = True
226 | # self._use_flat_path = True
227 | # path > Checks.MAX_PATH_LENGTH
228 | # skip hardlink on non-downloaded file (line 272)
229 | # file exists already line 290
230 |
231 | # check that next_page functionality works
232 | # in fetch_album_contents and index_albums_type
233 | self.download_faves()
234 |
235 | # test file exists already in create_album_content_links
236 | with patch("shutil.rmtree"):
237 | self.download_faves(trash=False)
238 |
239 | # check that empty media_json response works
240 | with patch(
241 | "gphotos_sync.restclient.Method.execute", return_value=self.DummyResponse()
242 | ):
243 | self.download_faves(expected=0)
244 |
--------------------------------------------------------------------------------
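
The BaseMedia assertions above exercise the 'pure virtual' pattern mentioned in the class docstring: base-class properties raise NotImplementedError until a concrete subclass overrides them. A minimal sketch of the pattern with hypothetical names:

import pytest

class Media:
    """Base class: every accessor must be overridden by a concrete subclass."""

    @property
    def mime_type(self) -> str:
        raise NotImplementedError

class JpegMedia(Media):
    @property
    def mime_type(self) -> str:
        return "image/jpeg"

with pytest.raises(NotImplementedError):
    _ = Media().mime_type
assert JpegMedia().mime_type == "image/jpeg"
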
/tests/test_units/test_local_scan.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from unittest import TestCase
3 |
4 | from gphotos_sync.Checks import do_check
5 | from gphotos_sync.LocalFilesMedia import LocalFilesMedia
6 |
7 | test_data = Path(__file__).absolute().parent.parent / "test-data"
8 |
9 |
10 | class TestLocalScan(TestCase):
11 | def test_local_duplicate_names(self):
12 | ps = "PIC00002 (2).jpg"
13 | p = Path(test_data) / Path(ps)
14 |
15 | # ugly global stuff to make LocalFilesMedia work
16 | do_check(test_data)
17 |
18 | lf = LocalFilesMedia(p)
19 | self.assertEqual(lf.duplicate_number, 1)
20 |
21 | assert str(lf.filename) == ps
22 |
23 | ps = "PIC00002.jpg"
24 | p = Path(test_data) / Path(ps)
25 |
26 | lf = LocalFilesMedia(p)
27 | self.assertEqual(lf.duplicate_number, 0)
28 |
29 | assert str(lf.filename) == ps
30 |
--------------------------------------------------------------------------------
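
The duplicate numbering asserted above follows the ' (n)' suffix convention that file managers and Google Takeout use for name clashes: 'PIC00002.jpg' is duplicate 0 and 'PIC00002 (2).jpg' is duplicate 1. A standalone sketch of suffix parsing consistent with these assertions (an illustration only, not LocalFilesMedia's actual code):

import re
from pathlib import Path

DUP_SUFFIX = re.compile(r"^(?P<base>.*) \((?P<copy>\d+)\)$")

def duplicate_number(path: Path) -> int:
    """0 for 'name.jpg', n-1 for 'name (n).jpg'."""
    match = DUP_SUFFIX.match(path.stem)
    return int(match.group("copy")) - 1 if match else 0

assert duplicate_number(Path("PIC00002.jpg")) == 0
assert duplicate_number(Path("PIC00002 (2).jpg")) == 1
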
/tests/test_units/test_units.py:
--------------------------------------------------------------------------------
1 | import json
2 | from datetime import datetime
3 | from os import environ
4 | from os import name as os_name
5 | from pathlib import Path
6 | from unittest import TestCase
7 |
8 | import pytest
9 | from requests import exceptions as exc
10 | from requests.adapters import HTTPAdapter
11 | from urllib3 import Retry
12 |
13 | import gphotos_sync.authorize as auth
14 | import tests.test_setup as ts
15 | from gphotos_sync.Checks import do_check, get_check
16 | from gphotos_sync.GoogleAlbumMedia import GoogleAlbumMedia
17 | from gphotos_sync.LocalFilesMedia import LocalFilesMedia
18 |
19 | is_travis = "TRAVIS" in environ
20 |
21 | scope = [
22 | "https://www.googleapis.com/auth/photoslibrary.readonly",
23 | "https://www.googleapis.com/auth/photoslibrary.sharing",
24 | ]
25 |
26 | token_file = (
27 | Path(__file__).absolute().parent.parent / "test_credentials" / ".gphotos.token"
28 | )
29 | secrets_file = (
30 | Path(__file__).absolute().parent.parent / "test_credentials" / "client_secret.json"
31 | )
32 | test_data = Path(__file__).absolute().parent.parent / "test-data"
33 |
34 |
35 | class TestUnits(TestCase):
36 | def test_http_500_retries(self):
37 | a = auth.Authorize(scope, token_file, secrets_file)
38 | a.authorize()
39 |
40 | # speed up error checking by reducing backoff factor
41 | retry = Retry(
42 | total=5,
43 | backoff_factor=0.01,
44 | status_forcelist=[500, 502, 503, 504],
45 | allowed_methods=frozenset(["GET", "POST"]),
46 | raise_on_status=False,
47 | respect_retry_after_header=True,
48 | )
49 |
50 | a.session.mount("https://", HTTPAdapter(max_retries=retry))
51 |
52 | start = datetime.now()
53 |
54 | result = a.session.get("https://httpbin.org/status/500", timeout=30)
55 | self.assertEqual(result.status_code, 500)
56 | elapsed = datetime.now() - start
57 | # timeout should not affect the 5 retries
58 | # but backoff_factor=0.01 will (a little)
59 | self.assertLess(elapsed.seconds, 5)
60 | a.session.close()
61 |
62 | def test_download_timeout(self):
63 | a = auth.Authorize(scope, token_file, secrets_file)
64 | a.authorize()
65 | retry_error = False
66 | start = datetime.now()
67 |
68 | # speed up error checking by reducing backoff factor
69 | retry = Retry(
70 | total=5,
71 | backoff_factor=0.2,
72 | status_forcelist=[500, 502, 503, 504],
73 | allowed_methods=frozenset(["GET", "POST"]),
74 | raise_on_status=False,
75 | respect_retry_after_header=True,
76 | )
77 |
78 | a.session.mount("https://", HTTPAdapter(max_retries=retry))
79 | try:
80 | _ = a.session.get("https://httpbin.org/delay/5", stream=True, timeout=0.2)
81 | except exc.ConnectionError as e:
82 | retry_error = True
83 | print(e)
84 |
85 | a.session.close()
86 | elapsed = datetime.now() - start
87 | self.assertEqual(retry_error, True)
88 | # 0.2 timeout x 5 retries = 1 sec
89 | self.assertGreater(elapsed.seconds, 1)
90 |
91 | def test_jpg_description(self):
92 | p = test_data / "IMG_20190102_112832.jpg"
93 | lfm = LocalFilesMedia(p)
94 | self.assertEqual(lfm.description, "")
95 |
96 | p = test_data / "20180126_185832.jpg"
97 | lfm = LocalFilesMedia(p)
98 | self.assertEqual(lfm.description, "")
99 |
100 | p = test_data / "1987-JohnWoodAndGiles.jpg"
101 | lfm = LocalFilesMedia(p)
102 | self.assertEqual(lfm.description, "")
103 |
104 | def test_jpg_description2(self):
105 | p = test_data / "IMG_20180908_132733-gphotos.jpg"
106 | lfm = LocalFilesMedia(p)
107 | self.assertEqual(lfm.description, "")
108 |
109 | p = test_data / "IMG_20180908_132733-insync.jpg"
110 | lfm = LocalFilesMedia(p)
111 | self.assertEqual(lfm.description, "")
112 |
113 | def test_empty_media(self):
114 | do_check(test_data)
115 | g = GoogleAlbumMedia(json.loads('{"emptyJson":"0"}'))
116 | self.assertEqual(0, g.size)
117 | self.assertEqual("none", g.mime_type)
118 | self.assertEqual("none", g.description)
119 | self.assertEqual(None, g.create_date)
120 | self.assertEqual(None, g.modify_date)
121 | # noinspection PyBroadException
122 | try:
123 | _ = g.url
124 | assert False, "empty album url should throw"
125 | except Exception:
126 | pass
127 | self.assertEqual(Path("") / "", g.full_folder)
128 | g.duplicate_number = 1
129 | self.assertEqual("none (2)", g.filename)
130 |
131 | def test_bad_filenames(self):
132 | folder = do_check(test_data)
133 |
134 | filename = folder.valid_file_name("hello. ")
135 |
136 | if os_name == "nt":
137 | self.assertEqual(filename, "hello")
138 | else:
139 | self.assertEqual(filename, "hello.")
140 | filename = folder.valid_file_name("hello.😀")
141 | self.assertEqual(filename, "hello.😀")
142 | filename = folder.valid_file_name("hello./")
143 | self.assertEqual(filename, "hello._")
144 |
145 | # patch the checks
146 | folder.is_linux = False
147 | folder.is_unicode = False
148 |
149 | filename = folder.valid_file_name("hello. ")
150 |
151 | self.assertEqual(filename, "hello")
152 | filename = folder.valid_file_name("hello.😀")
153 | self.assertEqual(filename, "hello._")
154 | filename = folder.valid_file_name("hello..")
155 | self.assertEqual(filename, "hello")
156 |
157 | def test_os_filesystem(self):
158 | if is_travis:
159 | pytest.skip(
160 | "skipping windows filesystem test since travis has no NTFS",
161 | allow_module_level=True,
162 | )
163 | if os_name == "nt":
164 | # assume there is a c:\ on the test machine (which is likely)
165 | do_check(Path("C:\\"))
166 | self.assertFalse(get_check().is_linux)
167 | else:
168 | do_check(test_data)
169 | self.assertTrue(get_check().is_linux)
170 |
171 | def test_fs_overrides(self):
172 | with ts.SetupDbAndCredentials() as s:
173 | args = ["--ntfs", "--max-filename", "30"]
174 | s.test_setup(
175 | "test_fs_overrides", args=args, trash_db=True, trash_files=True
176 | )
177 | s.gp.fs_checks(s.root, s.parsed_args)
178 | self.assertFalse(get_check().is_linux)
179 | self.assertEqual(get_check().max_filename, 30)
180 |
181 | if os_name != "nt":
182 | args = []
183 | s.test_setup(
184 | "test_fs_overrides", args=args, trash_db=True, trash_files=True
185 | )
186 | s.gp.fs_checks(s.root, s.parsed_args)
187 | self.assertTrue(get_check().is_linux)
188 | self.assertTrue(get_check().max_filename >= 242)
189 |
--------------------------------------------------------------------------------
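
For reference, the sanitisation rules that test_bad_filenames pins down can be summarised as: path separators always become underscores, trailing whitespace is always stripped, and only non-Linux/non-Unicode targets additionally strip trailing dots and replace non-ASCII characters. A behavioural sketch consistent with those assertions (not the real Checks implementation):

import re

def valid_file_name(name: str, is_linux: bool = True, is_unicode: bool = True) -> str:
    """Sanitize a filename per the rules asserted in test_bad_filenames."""
    name = re.sub(r"[/\\]", "_", name)             # path separators never allowed
    if not is_unicode:
        name = re.sub(r"[^\x00-\x7F]", "_", name)  # non-ASCII to underscore
    if is_linux:
        name = name.rstrip(" ")                    # linux only bans trailing space
    else:
        name = name.rstrip(" .")                   # windows also bans trailing dots
    return name

assert valid_file_name("hello. ") == "hello."
assert valid_file_name("hello./") == "hello._"
assert valid_file_name("hello. ", is_linux=False, is_unicode=False) == "hello"
assert valid_file_name("hello..", is_linux=False, is_unicode=False) == "hello"
assert valid_file_name("hello.😀", is_linux=False, is_unicode=False) == "hello._"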