├── .code-style.md
├── .deployment-instructions.md
├── .documentation.md
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── pull_request_template.md
└── workflows
│ ├── code_coverage.yml
│ ├── lint_and_test.yml
│ └── publish.yml
├── .gitignore
├── .mapping.png
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── .testing.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── docs
├── Makefile
├── api
│ ├── modules.rst
│ ├── pymove.core.rst
│ ├── pymove.models.pattern_mining.rst
│ ├── pymove.models.rst
│ ├── pymove.preprocessing.rst
│ ├── pymove.query.rst
│ ├── pymove.rst
│ ├── pymove.semantic.rst
│ ├── pymove.uncertainty.rst
│ ├── pymove.utils.rst
│ └── pymove.visualization.rst
├── conf.py
├── examples
│ ├── 00_What_is_PyMove.rst
│ ├── 01_Exploring_MoveDataFrame.rst
│ ├── 02_Exploring_Preprossessing.rst
│ ├── 03_Exploring_Visualization.rst
│ ├── 03_Exploring_Visualization_files
│ │ ├── 03_Exploring_Visualization_11_0.png
│ │ ├── 03_Exploring_Visualization_12_0.png
│ │ ├── 03_Exploring_Visualization_35_0.png
│ │ └── 03_Exploring_Visualization_38_0.png
│ ├── 04_Exploring_Grid.rst
│ ├── 04_Exploring_Grid_files
│ │ └── 04_Exploring_Grid_26_0.png
│ ├── 05_Exploring_Utils.rst
│ ├── 06_Exploring_Integrations.rst
│ ├── 07_Exploring_Query.rst
│ ├── 08_Exploring_Semantic.rst
│ └── notebooks.rst
├── index.rst
├── make.bat
└── requirements.txt
├── notebooks
├── 00_What_is_PyMove.ipynb
├── 01_Exploring_MoveDataFrame.ipynb
├── 02_Exploring_Preprossessing.ipynb
├── 03_Exploring_Visualization.ipynb
├── 04_Exploring_Grid.ipynb
├── 05_Exploring_Utils.ipynb
├── 06_Exploring_Integrations.ipynb
├── 07_Exploring_Query.ipynb
├── 08_Exploring_Semantic.ipynb
├── 09_Exploring_Visualization_Application.ipynb
├── README.md
├── atlantic.csv
├── extracao_geolife.ipynb
└── geolife_sample.csv
├── pymove
├── __init__.py
├── core
│ ├── __init__.py
│ ├── dask.py
│ ├── dataframe.py
│ ├── grid.py
│ ├── interface.py
│ ├── pandas.py
│ └── pandas_discrete.py
├── models
│ ├── __init__.py
│ ├── anomaly_detection.py
│ ├── classification.py
│ └── pattern_mining
│ │ ├── __init__.py
│ │ ├── clustering.py
│ │ ├── freq_seq_patterns.py
│ │ ├── moving_together_patterns.py
│ │ └── periodic_patterns.py
├── preprocessing
│ ├── __init__.py
│ ├── compression.py
│ ├── filters.py
│ ├── segmentation.py
│ └── stay_point_detection.py
├── query
│ ├── __init__.py
│ └── query.py
├── semantic
│ ├── __init__.py
│ └── semantic.py
├── tests
│ ├── __init__.py
│ ├── baseline
│ │ ├── features.png
│ │ ├── plot_bounds.png
│ │ ├── plot_coords.png
│ │ ├── plot_line.png
│ │ ├── shot_points_by_date.png
│ │ ├── shot_points_by_date_line.png
│ │ ├── show_gps_points.png
│ │ ├── traj_id.png
│ │ └── trajectories.png
│ ├── test_base_init.py
│ ├── test_core_dask.py
│ ├── test_core_dataframe.py
│ ├── test_core_grid.py
│ ├── test_core_pandas.py
│ ├── test_core_pandas_discrete.py
│ ├── test_models_pattern_mining_clustering.py
│ ├── test_preprocessing_compression.py
│ ├── test_preprocessing_filters.py
│ ├── test_preprocessing_segmentation.py
│ ├── test_preprocessing_stay_point_detection.py
│ ├── test_query.py
│ ├── test_semantic.py
│ ├── test_utils_conversions.py
│ ├── test_utils_data_augmentation.py
│ ├── test_utils_datetime.py
│ ├── test_utils_distances.py
│ ├── test_utils_geoutils.py
│ ├── test_utils_integration.py
│ ├── test_utils_math.py
│ ├── test_utils_mem.py
│ ├── test_utils_networkx.py
│ ├── test_utils_trajectories.py
│ ├── test_utils_visual.py
│ ├── test_visualization_folium.py
│ └── test_visualization_matplotlib.py
├── uncertainty
│ ├── __init__.py
│ ├── privacy.py
│ └── reducing.py
├── utils
│ ├── __init__.py
│ ├── constants.py
│ ├── conversions.py
│ ├── data_augmentation.py
│ ├── datetime.py
│ ├── distances.py
│ ├── geoutils.py
│ ├── integration.py
│ ├── log.py
│ ├── math.py
│ ├── mem.py
│ ├── networkx.py
│ ├── trajectories.py
│ └── visual.py
└── visualization
│ ├── __init__.py
│ ├── folium.py
│ └── matplotlib.py
├── requirements-dev.txt
├── requirements.txt
├── setup.cfg
└── setup.py
/.code-style.md:
--------------------------------------------------------------------------------
1 | # Code Style
2 |
3 | We are using some tools to keep a default coding style in the project.
4 | - git pre-commit hooks to lint and perform some standardization
5 | on every commit.
6 |
7 | - codacy, a platform integrated to github that executes static
8 | code quality analysis and track quality evolution.
9 |
10 | ---
11 |
12 | ## Docstrings and Type Hinting
13 |
14 | We following the [Numpy](https://numpydoc.readthedocs.io/en/latest/format.html) docstring style with [PEP484](https://www.python.org/dev/peps/pep-0484/#abstract).
15 |
16 | ## Links
17 | -
18 | -
19 |
20 | ---
21 |
22 | ## flake8
23 |
24 | Flake8 is a python linter that helps to keep the code up to PEP standards.
25 |
26 | ## mypy
27 |
28 | Mypy is a static type checker for python
29 |
30 | To lint the code, run: `make lint`
31 |
32 | ## Pre-Commit
33 |
34 | Next we'll explain the steps to use the pre-commit hooks.
35 |
36 | 1. Download and install the python package `pre-commit`
37 | - `conda install -c conda-forge pre-commit` or `pip install pre-commit`
38 | - In the repository folder, run `pre-commit install`
39 |
40 | 2. With `pre-commit`, the next time you make a commit, it will analise
41 | your code to match the hooks defined in the `.pre-commit-config.yaml` file.
42 | ```text
43 | Fix double quoted strings................................................Passed
44 | Trim Trailing Whitespace.................................................Failed
45 | - hook id: trailing-whitespace
46 | - exit code: 1
47 | - files were modified by this hook
48 |
49 | Fixing pymove/core/dataframe.py
50 |
51 | Fix End of Files.........................................................Passed
52 | Mixed line ending........................................................Passed
53 | Check for added large files..............................................Passed
54 | Don't commit to branch...................................................Passed
55 | seed isort known_third_party.............................................Failed
56 | isort....................................................................Failed
57 | - hook id: isort
58 | - files were modified by this hook
59 |
60 | Fixing pymove/core/dataframe.py
61 | Fixing pymove/utils/trajectories.py
62 |
63 | flake8...................................................................Failed
64 | - hook id: flake8
65 | - exit code: 1
66 |
67 | pymove/utils/constants.py:64:5: F601 dictionary key 11 repeated with different values
68 | pymove/utils/constants.py:65:5: F601 dictionary key 12 repeated with different values
69 | pymove/utils/constants.py:74:5: F601 dictionary key 11 repeated with different values
70 | pymove/utils/constants.py:75:5: F601 dictionary key 12 repeated with different values
71 | pymove/core/dataframe.py:970:29: E711 comparison to None should be 'if cond is None:'
72 |
73 | ```
74 |
75 | ### Explaining each hook
76 |
77 | - Fix double quoted strings: Checks and fixes every string declaration
78 | is made using single quotes.
79 |
80 | - Trim Trailing Whitespace: Checks and fixes so there are no
81 | trailing whitespaces.
82 |
83 | - Fix End of Files: Checks and fixes so that all files end's
84 | with a blank line.
85 |
86 | - Mixed line endings: Checks and fixes line breaks to use the unix `LF`.
87 |
88 | - Check for added large files: Doesn't allow files bigger than `15Mb`
89 | to be commited.
90 |
91 | - Don't commit to branch: Doesn't allow direct commits to `master` branch.
92 |
93 |
94 | - isort: Sorts the imports.
95 |
96 | - flake8: Ensures that the code follows `pylint` and `pyflakes` guidelines.
97 | It will point the errors in the code.
98 |
99 | - mypy: Performs type checking.
100 | It will point the errors in the code.
101 | ---
102 |
103 | ## Codacy
104 |
105 | Codacy statically analyzes `Pull Requests` made into the `master` and `developer` branches.
106 |
107 | Codacy uses the tools `Bandit`, `Prospector`, `PyLint` and `Remark Lint`;
108 |
109 | The error encountered will be pointed by Codacy as `PR Comments`.
110 |
--------------------------------------------------------------------------------
/.deployment-instructions.md:
--------------------------------------------------------------------------------
1 | # Deploying Python Packages
2 |
3 | ---
4 |
5 | ## Pypi Deployment
6 |
7 | ---
8 |
9 | ### Uploading a package to PyPI
10 |
11 | The link in this tutorial will explain the steps to upload a package to pypi:
12 |
13 | #### Use Github Actions to deploy
14 |
15 | 1. Create an API token to authenticate with PyPI:
16 | - In your Pypi account settings, go to API tokens section and select "Add API token"
17 |
18 | - Add the token to the Github Actions Secret.
19 |
20 | 2. Create a github workflow with the following content:
21 | ```yaml
22 | name: Publish to PyPI
23 | on:
24 | push:
25 | tags:
26 | - "*"
27 |
28 | jobs:
29 | build-n-publish:
30 | if: github.event.base_ref == 'refs/heads/' && startsWith(github.ref, 'refs/tags')
31 | name: Build and publish package
32 | runs-on: ubuntu-latest
33 | steps:
34 | - uses: actions/checkout@v2
35 | - name: Set up Python 3.6
36 | uses: actions/setup-python@v2
37 | with:
38 | python-version: 3.6
39 | - name: Install dependencies
40 | run: |
41 | python -m pip install --upgrade pip
42 | make dev
43 | - name:
44 | run: |
45 | make lint
46 | make test
47 | - name: Build
48 | run: |
49 | pip install setuptools wheel twine
50 | python setup.py sdist bdist_wheel
51 | - name: Publish
52 | uses: pypa/gh-action-pypi-publish@master
53 | with:
54 | user: __token__
55 | password: ${{ secrets.pypi_password }}
56 | ```
57 |
58 | #### Configure bump2version
59 |
60 | For the versioning control we a using the package bump2version.
61 |
62 | 1. Run `pip install bump2version` in your environment
63 | 2. Add the following attributes to the setup.cfg file:
64 | ```conf
65 | [bumpversion]
66 | current_version =
67 | allow_dirty = True
68 | tag_name = version-{new_version}
69 | tag = True
70 | commit = True
71 | [bumpversion:file:]
72 | [bumpversion:file:]
73 | ```
74 |
75 | ***Note:*** If `NotADirectoryError: [Errno 20] Not a directory`,
76 | check for a fix.
77 |
78 | #### For more information see these links
79 |
80 | -
81 | -
82 |
83 | ---
84 |
85 | 1. Run the command `bumperversion [major|minor|patch]` to increase the version number.
86 | This will create a new tag and commit the changes.
87 |
88 | 2. Push the changes to the developer branch.
89 |
90 | 3. Create a pull request onto master. To deploy pymove to Pypi using
91 | you must be in the master branch, pushing a tagged commit.
92 |
93 | 4. After merging the new version into the master branch, push the new
94 | tag created by bump2version.
95 | - `git push --tags`
96 |
97 | ---
98 |
99 | ## Conda-Forge Deployment
100 |
101 | ---
102 |
103 | ### Uploading a package to conda-forge
104 |
105 | The link in this tutorial will explain the steps to upload a
106 | package to conda-forge:
107 |
108 | ---
109 |
110 | #### Building conda recipe
111 |
112 | With the package published to Pypi, we can easily deploy to the
113 | conda-forge chanel. First we create a conda recipe.
114 |
115 | 1. Run the command conda `conda skeleton pypi `.
116 | This will create a conda recipe for the package.
117 |
118 | 2. Now add some information to the `/meta.yaml` file.
119 | ```yaml
120 | {% set name = %}
121 | {% set version = %}
122 |
123 | package:
124 | name: "{{ name|lower }}"
125 | version: "{{ version }}"
126 |
127 | source:
128 | url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz"
129 | sha256:
130 |
131 | build:
132 | number: 0
133 | script: "{{ PYTHON }} -m pip install . -vv"
134 | noarch: python
135 |
136 | requirements:
137 | host:
138 | - pip
139 | - python >=3.6
140 | run:
141 | -
142 | - python >=3.6
143 |
144 | test:
145 | imports:
146 | -
147 |
148 | about:
149 | home:
150 | license:
151 | license_family:
152 | license_file:
153 | summary:
154 | doc_url:
155 | dev_url:
156 |
157 | extra:
158 | recipe-maintainers:
159 | -
160 | -
161 | ```
162 |
163 | All package run requirements must be available in the conda-forge channel.
164 |
165 | #### Request the publication to the conda-forge channel
166 |
167 | 1. Fork the example recipes repository at
168 |
169 | 2. Copy the `/meta.yaml` file created in the step above to
170 | the forked repo `staged-recipes/recipes/example` directory
171 |
172 | 3. Push the changes to your forked repository.
173 |
174 | 4. Make a pull request for your repository to the master branch on
175 | the stage-recipes repository.
176 | - `conda-forge:master from :`
177 |
178 | 5. Now, the pull request will be checked.
179 | - Complete the checklist for the pull requests.
180 |
181 | - The recipe meta.yaml file will be checked by the `conda-forge-linting service`.
182 |
183 | - The recipe will be built for `linux64`, `macos64`
184 | and `windows64` systems.
185 |
186 | 6. If there are any problems with the PR, a review team member will give
187 | you feedback, pointing out improvements and answering questions.
188 |
189 | 7. Once everything is in order, the pull request will be aproved.
190 |
191 | ---
192 |
193 | ### Maintaining the feedstock repository
194 |
195 | 1. After the pull request, a new repository for the package
196 | will be created similar to `https://github.com/conda-forge/-feedstock.`
197 |
198 | 2. You will be added as a maintainer for the newly created repository
199 |
200 | 3. The repository will automatically build the conda package
201 | and upload to
202 |
203 | 4. To make any changes to the conda package, send a `PR` to the
204 | git feedstock repository main branch from a fork.
205 |
206 | 5. Branches of the main repository are used for maintaining
207 | different versions only.
208 |
209 | The `regro-cf-autotick-bot` will make an automatic `Pull Request`
210 | when it detects that the PyPi package has been updated.
211 |
212 | Follow the `PR` instructions to update the `conda-forge` package.
213 |
214 | ---
215 |
--------------------------------------------------------------------------------
/.documentation.md:
--------------------------------------------------------------------------------
1 | # Documentation
2 |
3 | We use sphinx to build and readthedocs to host the documentation.
4 |
5 | ## How create doc with Sphinx
6 | ### 1. Getting to know Sphinx
7 | #### 1.1 Sphinx
8 | Sphinx is a documentation generator or a tool that translates a set of
9 | plain text source files into various output formats, automatically producing
10 | cross-references, indices, etc. That is, if you have a directory containing a
11 | bunch of reStructuredText or Markdown documents, Sphinx can generate a series
12 | of HTML files, a PDF file (via LaTeX), man pages and much more.
13 |
14 | For more information access the [documentation here](https://www.sphinx-doc.org/en/master/usage/quickstart.html).
15 |
16 | ##### 1.1.1. Sphinx APIDoc
17 | It is a tool for automatic generation of Sphinx sources that, using the
18 | autodoc extension, document a whole package in the style of other
19 | automatic API documentation tools.
20 | For more information access the [documentation here](https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html).
21 |
22 | ### 2. Tutorial
23 | Here we will describe how to create a docs, configure the conf.py
24 | file and update the documentation.
25 |
26 | #### 2.1. Create the documentation
27 | 1. Install Sphinx!
28 |
29 | `pip install Sphinx`
30 | `pip install pip install sphinx_rtd_theme`
31 |
32 | 2. First, make the directory docs with command
33 |
34 | `mkdir docs`
35 |
36 | 3. Run the following command
37 |
38 | `sphinx-apidoc -o docs pymove -full`
39 |
40 | Finish! Your documentation has been created! The generated files are
41 | of the extension .rst (reStructuredText).
42 | There are **two main files**:
43 |
44 | - **index.rst**: is the root of your documentation
45 |
46 | - **conf.py**: where are the dependencies and internal settings,
47 | such as the html theme, and imports and the path to the library.
48 |
49 | #### 2.2. Configure the conf.py
50 | In the file **conf.py**, include the following imports:
51 |
52 | `import os`
53 |
54 | `import sys`
55 |
56 | And include the following code snippet, referring to the library path:
57 |
58 | `sys.path.append(os.path.join(os.path.dirname(__name__), '..'))`
59 |
60 | Now, you must:
61 | 1. Describe project informations
62 | 2. Configure extensions
63 |
64 | ```python
65 | extensions = [
66 | 'sphinx.ext.autodoc',
67 | 'sphinx.ext.napoleon',
68 | 'sphinx.ext.coverage',
69 | 'sphinx.ext.viewcode',
70 | 'sphinx.ext.todo',
71 | 'sphinx_rtd_theme'
72 | ]
73 | ```
74 |
75 | 3. Configure theme html
76 |
77 | `html_theme = 'sphinx_rtd_theme'`
78 |
79 | and finish!
80 |
81 | #### 2.3. Generating the .html files
82 | To generate the .html files, just access the docs folder, just run the
83 | following command:
84 |
85 | `make doc`
86 |
87 | #### 2.4. Hospedando docs in [Readthedocs](https://readthedocs.org/)
88 |
89 | 1. Log in to Readthedocs with your github account
90 |
91 | 2. Import the project/repository
92 |
93 | 3. Select the project and the branch where your project contains
94 | the documentation
95 |
96 | 4. Click on build project
97 |
98 | 5. After preparing the environment and finalizing the building process,
99 | you can see your project's documentation in **view docs**.
100 |
101 | #### 2.5. Update the documentation
102 | To update the documentation just run the following command and move
103 | the generated files to the folder `references/`.
104 |
105 | `sphinx-apidoc -f -o docs/references pymove pymove/tests/`
106 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Types of changes
2 |
3 | - [ ] Bug fix (non-breaking change which fixes an issue)
4 | - [ ] New feature (non-breaking change which adds functionality)
5 | - [ ] Breaking change (fix or feature that would cause existing functionality to change)
6 | - [ ] I have read the **CONTRIBUTING** document.
7 | - [ ] My code follows the code style of this project (see `.code-style.md`).
8 | - [ ] All new and existing tests passed (see `.testing.md`).
9 | - [ ] I have added tests to cover my changes.
10 | - [ ] My change requires a change to the documentation.
11 | - [ ] I have updated the documentation accordingly (see `.documentation.md`).
12 |
13 | ## Description
14 |
15 | - What is the current behavior? (You can also link to an open issue here)
16 |
17 |
18 | - What is the new behavior (if this is a feature change)?
19 |
20 |
--------------------------------------------------------------------------------
/.github/workflows/code_coverage.yml:
--------------------------------------------------------------------------------
1 | name: Code Coverage
2 | on:
3 | push:
4 | branches:
5 | - "master"
6 | - "developer"
7 |
8 | jobs:
9 | code-coverage:
10 | name: Code Coverage
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@main
14 | - name: Set up Python 3.7
15 | uses: actions/setup-python@main
16 | with:
17 | python-version: 3.7
18 | - name: Install dependencies
19 | run: |
20 | python -m pip install --upgrade pip
21 | make dev
22 | - name: Coverage
23 | run: |
24 | make coverage
25 | coverage xml -o "coverage.xml"
26 | - name: Codacy Coverage Reporter
27 | uses: codacy/codacy-coverage-reporter-action@master
28 | with:
29 | project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
30 | coverage-reports: coverage.xml
31 |
--------------------------------------------------------------------------------
/.github/workflows/lint_and_test.yml:
--------------------------------------------------------------------------------
1 | name: Lint and Test
2 | on: [push, pull_request]
3 |
4 | jobs:
5 | lint-test:
6 | name: Lint and Test
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version: ['3.7', '3.8', '3.9']
11 | steps:
12 | - uses: actions/checkout@main
13 | - uses: actions/setup-python@main
14 | with:
15 | python-version: ${{ matrix.python-version }}
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | make dev
20 | - name: Check code style
21 | working-directory: ${{ github.workspace }}
22 | run: |
23 | make lint
24 | - name: Runs unit tests
25 | working-directory: ${{ github.workspace }}
26 | run: |
27 | make test
28 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish to PyPI
2 | on:
3 | push:
4 | tags:
5 | - "*"
6 |
7 | jobs:
8 | build-n-publish:
9 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
10 | name: Build and publish package
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@main
14 | - name: Set up Python 3.7
15 | uses: actions/setup-python@main
16 | with:
17 | python-version: 3.7
18 | - name: Install dependencies
19 | run: |
20 | python -m pip install --upgrade pip
21 | make dev
22 | - name: Lint and Test
23 | run: |
24 | make lint
25 | make test
26 | - name: Build
27 | run: |
28 | pip install setuptools wheel twine
29 | python setup.py sdist bdist_wheel
30 | - name: Publish
31 | uses: pypa/gh-action-pypi-publish@master
32 | with:
33 | user: __token__
34 | password: ${{ secrets.pypi_password }}
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
2 |
3 | # Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,jupyternotebooks,macos,pycharm,python,windows
4 | # Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,linux,jupyternotebooks,macos,pycharm,python,windows
5 |
6 | ### JupyterNotebooks ###
7 | # gitignore template for Jupyter Notebooks
8 | # website: http://jupyter.org/
9 |
10 | .ipynb_checkpoints
11 | */.ipynb_checkpoints/*
12 |
13 | # IPython
14 | profile_default/
15 | ipython_config.py
16 |
17 | # Remove previous ipynb_checkpoints
18 | # git rm -r .ipynb_checkpoints/
19 |
20 | ### Linux ###
21 | *~
22 |
23 | # temporary files which can be created if a process still has a handle open of a deleted file
24 | .fuse_hidden*
25 |
26 | # KDE directory preferences
27 | .directory
28 |
29 | # Linux trash folder which might appear on any partition or disk
30 | .Trash-*
31 |
32 | # .nfs files are created when an open file is removed but is still being accessed
33 | .nfs*
34 |
35 | ### macOS ###
36 | # General
37 | .DS_Store
38 | .AppleDouble
39 | .LSOverride
40 |
41 | # Icon must end with two \r
42 | Icon
43 |
44 | # Thumbnails
45 | ._*
46 |
47 | # Files that might appear in the root of a volume
48 | .DocumentRevisions-V100
49 | .fseventsd
50 | .Spotlight-V100
51 | .TemporaryItems
52 | .Trashes
53 | .VolumeIcon.icns
54 | .com.apple.timemachine.donotpresent
55 |
56 | # Directories potentially created on remote AFP share
57 | .AppleDB
58 | .AppleDesktop
59 | Network Trash Folder
60 | Temporary Items
61 | .apdisk
62 |
63 | ### PyCharm ###
64 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
65 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
66 |
67 | # User-specific stuff
68 | .idea/**/workspace.xml
69 | .idea/**/tasks.xml
70 | .idea/**/usage.statistics.xml
71 | .idea/**/dictionaries
72 | .idea/**/shelf
73 |
74 | # Generated files
75 | .idea/**/contentModel.xml
76 |
77 | # Sensitive or high-churn files
78 | .idea/**/dataSources/
79 | .idea/**/dataSources.ids
80 | .idea/**/dataSources.local.xml
81 | .idea/**/sqlDataSources.xml
82 | .idea/**/dynamic.xml
83 | .idea/**/uiDesigner.xml
84 | .idea/**/dbnavigator.xml
85 |
86 | # Gradle
87 | .idea/**/gradle.xml
88 | .idea/**/libraries
89 |
90 | # Gradle and Maven with auto-import
91 | # When using Gradle or Maven with auto-import, you should exclude module files,
92 | # since they will be recreated, and may cause churn. Uncomment if using
93 | # auto-import.
94 | # .idea/artifacts
95 | # .idea/compiler.xml
96 | # .idea/jarRepositories.xml
97 | # .idea/modules.xml
98 | # .idea/*.iml
99 | # .idea/modules
100 | # *.iml
101 | # *.ipr
102 |
103 | # CMake
104 | cmake-build-*/
105 |
106 | # Mongo Explorer plugin
107 | .idea/**/mongoSettings.xml
108 |
109 | # File-based project format
110 | *.iws
111 |
112 | # IntelliJ
113 | out/
114 |
115 | # mpeltonen/sbt-idea plugin
116 | .idea_modules/
117 |
118 | # JIRA plugin
119 | atlassian-ide-plugin.xml
120 |
121 | # Cursive Clojure plugin
122 | .idea/replstate.xml
123 |
124 | # Crashlytics plugin (for Android Studio and IntelliJ)
125 | com_crashlytics_export_strings.xml
126 | crashlytics.properties
127 | crashlytics-build.properties
128 | fabric.properties
129 |
130 | # Editor-based Rest Client
131 | .idea/httpRequests
132 |
133 | # Android studio 3.1+ serialized cache file
134 | .idea/caches/build_file_checksums.ser
135 |
136 | ### PyCharm Patch ###
137 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
138 |
139 | # *.iml
140 | # modules.xml
141 | # .idea/misc.xml
142 | # *.ipr
143 |
144 | # Sonarlint plugin
145 | # https://plugins.jetbrains.com/plugin/7973-sonarlint
146 | .idea/**/sonarlint/
147 |
148 | # SonarQube Plugin
149 | # https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
150 | .idea/**/sonarIssues.xml
151 |
152 | # Markdown Navigator plugin
153 | # https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
154 | .idea/**/markdown-navigator.xml
155 | .idea/**/markdown-navigator-enh.xml
156 | .idea/**/markdown-navigator/
157 |
158 | # Cache file creation bug
159 | # See https://youtrack.jetbrains.com/issue/JBR-2257
160 | .idea/$CACHE_FILE$
161 |
162 | # CodeStream plugin
163 | # https://plugins.jetbrains.com/plugin/12206-codestream
164 | .idea/codestream.xml
165 |
166 | ### Python ###
167 | # Byte-compiled / optimized / DLL files
168 | __pycache__/
169 | *.py[cod]
170 | *$py.class
171 |
172 | # C extensions
173 | *.so
174 |
175 | # Distribution / packaging
176 | .Python
177 | build/
178 | develop-eggs/
179 | dist/
180 | downloads/
181 | eggs/
182 | .eggs/
183 | lib/
184 | lib64/
185 | parts/
186 | sdist/
187 | var/
188 | wheels/
189 | pip-wheel-metadata/
190 | share/python-wheels/
191 | *.egg-info/
192 | .installed.cfg
193 | *.egg
194 | MANIFEST
195 |
196 | # PyInstaller
197 | # Usually these files are written by a python script from a template
198 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
199 | *.manifest
200 | *.spec
201 |
202 | # Installer logs
203 | pip-log.txt
204 | pip-delete-this-directory.txt
205 |
206 | # Unit test / coverage reports
207 | htmlcov/
208 | .tox/
209 | .nox/
210 | .coverage
211 | .coverage.*
212 | .cache
213 | nosetests.xml
214 | coverage.xml
215 | *.cover
216 | *.py,cover
217 | .hypothesis/
218 | .pytest_cache/
219 | pytestdebug.log
220 |
221 | # Translations
222 | *.mo
223 | *.pot
224 |
225 | # Django stuff:
226 | *.log
227 | local_settings.py
228 | db.sqlite3
229 | db.sqlite3-journal
230 |
231 | # Flask stuff:
232 | instance/
233 | .webassets-cache
234 |
235 | # Scrapy stuff:
236 | .scrapy
237 |
238 | # Sphinx documentation
239 | docs/_build/
240 | doc/_build/
241 |
242 | # PyBuilder
243 | target/
244 |
245 | # Jupyter Notebook
246 |
247 | # IPython
248 |
249 | # pyenv
250 | .python-version
251 |
252 | # pipenv
253 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
254 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
255 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
256 | # install all needed dependencies.
257 | #Pipfile.lock
258 |
259 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
260 | __pypackages__/
261 |
262 | # Celery stuff
263 | celerybeat-schedule
264 | celerybeat.pid
265 |
266 | # SageMath parsed files
267 | *.sage.py
268 |
269 | # Environments
270 | .env
271 | .venv
272 | env/
273 | venv/
274 | ENV/
275 | env.bak/
276 | venv.bak/
277 |
278 | # Spyder project settings
279 | .spyderproject
280 | .spyproject
281 |
282 | # Rope project settings
283 | .ropeproject
284 |
285 | # mkdocs documentation
286 | /site
287 |
288 | # mypy
289 | .mypy_cache/
290 | .dmypy.json
291 | dmypy.json
292 |
293 | # Pyre type checker
294 | .pyre/
295 |
296 | # pytype static type analyzer
297 | .pytype/
298 |
299 | ### VisualStudioCode ###
300 | .vscode/*
301 | .vscode/settings.json
302 | .vscode/tasks.json
303 | .vscode/launch.json
304 | .vscode/extensions.json
305 | *.code-workspace
306 | .devcontainer/
307 |
308 | ### VisualStudioCode Patch ###
309 | # Ignore all local history of files
310 | .history
311 |
312 | ### Windows ###
313 | # Windows thumbnail cache files
314 | Thumbs.db
315 | Thumbs.db:encryptable
316 | ehthumbs.db
317 | ehthumbs_vista.db
318 |
319 | # Dump file
320 | *.stackdump
321 |
322 | # Folder config file
323 | [Dd]esktop.ini
324 |
325 | # Recycle Bin used on file shares
326 | $RECYCLE.BIN/
327 |
328 | # Windows Installer files
329 | *.cab
330 | *.msi
331 | *.msix
332 | *.msm
333 | *.msp
334 |
335 | # Windows shortcuts
336 | *.lnk
337 |
338 | # End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,jupyternotebooks,macos,pycharm,python,windows
339 | # Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
340 |
341 | developing/
342 | notebooks/*
343 | !notebooks/geolife_sample.csv
344 | !notebooks/atlantic.csv
345 | !notebooks/README.md
346 | !notebooks/[0-9]*.ipynb
347 | !notebooks/extracao_geolife.ipynb
348 |
--------------------------------------------------------------------------------
/.mapping.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/.mapping.png
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v3.4.0
4 | hooks:
5 | - id: double-quote-string-fixer
6 | - id: trailing-whitespace
7 | - id: end-of-file-fixer
8 | - id: mixed-line-ending
9 | args: ['--fix=lf']
10 | - id: check-added-large-files
11 | args: ['--maxkb=25000']
12 | - id: no-commit-to-branch
13 | - repo: https://github.com/humitos/mirrors-autoflake
14 | rev: v1.3
15 | hooks:
16 | - id: autoflake
17 | args: ['--in-place', '--remove-all-unused-imports', '--ignore-init-module-imports']
18 | - repo: https://github.com/asottile/seed-isort-config
19 | rev: v2.2.0
20 | hooks:
21 | - id: seed-isort-config
22 | - repo: https://github.com/pre-commit/mirrors-isort
23 | rev: v5.7.0
24 | hooks:
25 | - id: isort
26 | - repo: https://gitlab.com/pycqa/flake8
27 | rev: 3.8.4
28 | hooks:
29 | - id: flake8
30 | additional_dependencies: [
31 | 'flake8-bugbear==21.3.2',
32 | 'flake8-docstrings==1.6.0',
33 | 'pep8-naming==0.11.1'
34 | ]
35 | - repo: https://github.com/pre-commit/mirrors-mypy
36 | rev: 'v0.812'
37 | hooks:
38 | - id: mypy
39 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/ directory with Sphinx
9 | sphinx:
10 | configuration: ./docs/conf.py
11 |
12 | # Optionally build your docs in additional formats such as PDF
13 | formats:
14 | - pdf
15 |
16 | # Optionally set the version of Python and requirements required to build your docs
17 | python:
18 | version: 3.7
19 | install:
20 | - requirements: ./docs/requirements.txt
21 | - method: pip
22 | path: .
23 |
--------------------------------------------------------------------------------
/.testing.md:
--------------------------------------------------------------------------------
1 | # Testing
2 |
3 | We are using some tools for testing modifications and new features additions.
4 | - automated tests using travis and pytest.
5 | - coverage.py, a tool for measuring code coverage of Python programs.
6 |
7 | ---
8 |
9 | ## Links
10 | -
11 | -
12 |
13 | ---
14 |
15 | ## PyTest
16 |
17 | Next we'll explain the steps to test your code. When a test is run, a `.` means
18 | that the test passed and an `F` means it failed.
19 |
20 | 1. In your environment, install `pytest` by running `pip install pytest`
21 | or `conda install -c conda-forge pytest`
22 |
23 | 2. Now, simply run `pytest`, and it will give you and output like this
24 | if all tests passed
25 | ```text
26 | ++ pytest --showlocals --pyargs pymove/
27 | ============================================== test session starts ==============================================
28 | platform linux -- Python 3.7.6, pytest-5.4.2, py-1.8.1, pluggy-0.13.1
29 | rootdir: /home/flycher/Documentos/PyMove, inifile: setup.cfg
30 | collected 95 items
31 |
32 | pymove/tests/test_init.py . [ 1%]
33 | pymove/tests/core/test_data_frame.py ......................................................................[ 74%]
34 | pymove/tests/preprocessing/test_compression.py . [ 75%]
35 | pymove/tests/preprocessing/test_filters.py ............... [ 91%]
36 | pymove/tests/preprocessing/test_segmentation.py ..... [ 96%]
37 | pymove/tests/preprocessing/test_stay_point_detection.py ... [100%]
38 |
39 | ====================================== 95 passed, 1 warning in 3.62s =============================================
40 | ```
41 |
42 | 3. And like this if any tests fail
43 | ```text
44 | ++ pytest --showlocals --pyargs pymove/
45 | ============================================== test session starts ==============================================
46 | platform linux -- Python 3.7.6, pytest-5.4.2, py-1.8.1, pluggy-0.13.1
47 | rootdir: /home/flycher/Documentos/PyMove, inifile: setup.cfg
48 | collected 95 items
49 |
50 | pymove/tests/test_init.py . [ 1%]
51 | pymove/tests/core/test_data_frame.py ......................................................................[ 74%]
52 | pymove/tests/preprocessing/test_compression.py . [ 75%]
53 | pymove/tests/preprocessing/test_filters.py .F............. [ 91%]
54 | pymove/tests/preprocessing/test_segmentation.py ..... [ 96%]
55 | pymove/tests/preprocessing/test_stay_point_detection.py ... [100%]
56 |
57 | =================================================================================================================
58 | def test_by_datetime():
59 | move_df, cols = _prepare_df_default(list_data_1)
60 |
61 | filter_values_start = filters.by_datetime(
62 | move_df, start_datetime='2008-10-23 05:53:11')
63 | expected = DataFrame(data=[
64 | [39.984199952392578, 116.31932067871094, Timestamp('2008-10-23 05:53:06'), 1],
65 | [39.984222412109375, 116.31940460205078, Timestamp('2008-10-23 05:53:11'), 2],
66 | [39.984222412109375, 116.31940460205078, Timestamp('2008-10-23 05:53:11'), 2]
67 | ], columns=cols, index=[1, 2, 3]
68 | )
69 | > assert_frame_equal(filter_values_start, expected)
70 | E AssertionError: DataFrame are different
71 | E
72 | E DataFrame shape mismatch
73 | E [left]: (2, 4)
74 | E [right]: (3, 4)
75 | ==================================== short test summary info =====================================================
76 | FAILED pymove/tests/preprocessing/test_filters.py::test_by_datetime - AssertionError: DataFrame are different
77 | ================================= 1 failed, 94 passed, 1 warning in 3.68s ========================================
78 | ```
79 |
80 | ### Testing new features and bugfixes
81 |
82 | If a new the code was modified, it is essential to run against the existing
83 | tests and implement new tests if new features were created.
84 | To create a new test, you can follow the structure similar to the
85 | source code, for example
86 |
87 | - If creating a new function named `filter_by_city` in the module `pymove/preprocessing/filter.py`,
88 | the test must be implemented in the file `pymove/tests/test_preprocessing_filters.py`,
89 | named `test_filter_by_city`
90 |
91 | - If creating a new module, for example `pymove/models/clustering/hierarchical.py`,
92 | the tests must be placed in `pymove/tests/test_models_clustering_hierarchical.py`
93 |
94 | - Every test must be independent from another, and if using files, you must
95 | use the `tmpdir` directive, like bellow. More on
96 | ```text
97 | def test_some_thing(tmpdir):
98 | d = tmpdir.mkdir('folder')
99 |
100 | f = d.join('name.csv')
101 | f.write(data)
102 | filename = os.path.join(
103 | f.dirname, f.basename
104 | )
105 | with open(filename, 'r') as f:
106 | ...
107 | ```
108 |
109 | ---
110 |
111 | ## Coverage.py
112 |
113 | `Coverage.py` allows us to se which lines of code are beign covered bt our tests
114 | To use it, install running `pip install coverage` or `conda install -c conda-forge coverage`
115 | - To create the report, run `coverage run -m pytest`
116 |
117 | - To view the report, run `coverage report`
118 |
119 | - The `coverage html` will create an html page that you can navigate,
120 | for that, open the fie `htmlcov/index.html`
121 |
122 | - To erase the data, run `coverage erase`
123 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age,
8 | body size, disability, ethnicity, sex characteristics,
9 | gender identity and expression, level of experience, education,
10 | socio-economic status, nationality, personal appearance,
11 | race, religion, or sexual identity and orientation.
12 |
13 | ## Our Standards
14 |
15 | Examples of behavior that contributes to creating a positive environment
16 | include:
17 |
18 | - Using welcoming and inclusive language
19 | - Being respectful of differing viewpoints and experiences
20 | - Gracefully accepting constructive criticism
21 | - Focusing on what is best for the community
22 | - Showing empathy towards other community members
23 |
24 | Examples of unacceptable behavior by participants include:
25 |
26 | - The use of sexualized language or imagery and unwelcome sexual attention or
27 | advances
28 |
29 | - Trolling, insulting/derogatory comments, and personal or political attacks
30 |
31 | - Public or private harassment
32 |
33 | - Publishing others' private information, such as a physical or electronic
34 | address, without explicit permission
35 |
36 | - Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Our Responsibilities
40 |
41 | Project maintainers are responsible for clarifying the standards of acceptable
42 | behavior and are expected to take appropriate and fair corrective action in
43 | response to any instances of unacceptable behavior.
44 |
45 | Project maintainers have the right and responsibility to remove, edit, or
46 | reject comments, commits, code, wiki edits, issues, and other contributions
47 | that are not aligned to this Code of Conduct, or to ban temporarily or
48 | permanently any contributor for other behaviors that they deem inappropriate,
49 | threatening, offensive, or harmful.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies both within project spaces and in public spaces
54 | when an individual is representing the project or its community. Examples of
55 | representing a project or community include using an official project e-mail
56 | address, posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event. Representation of a project may be
58 | further defined and clarified by project maintainers.
59 |
60 | ## Enforcement
61 |
62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
63 | reported by contacting the project team at .
64 | All complaints will be reviewed and investigated and will result in a
65 | response that is deemed necessary and appropriate to the circumstances.
66 | The project team is obligated to maintain confidentiality with regard
67 | to the reporter of an incident.
68 | Further details of specific enforcement policies may be posted separately.
69 |
70 | Project maintainers who do not follow or enforce the Code of Conduct in good
71 | faith may face temporary or permanent repercussions as determined by other
72 | members of the project's leadership.
73 |
74 | ## Attribution
75 |
76 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
77 | version 1.4, available at
78 |
79 | For answers to common questions about this code of conduct, see
80 |
81 |
82 | [homepage]: https://www.contributor-covenant.org
83 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to PyMove
2 | We love your input! We want to make contributing to this project as easy
3 | and transparent as possible, whether it's:
4 |
5 | - Reporting a bug
6 | - Discussing the current state of the code
7 | - Submitting a fix
8 | - Proposing new features
9 | - Becoming a maintainer
10 |
11 | ## We Develop with Github
12 | We use github to host code, to track issues and feature requests,
13 | as well as accept pull requests.
14 |
15 | ## We Use [Github Flow](https://docs.github.com/en/get-started/quickstart/github-flow)
16 | All Code Changes Happen Through Pull Requests.
17 | Pull requests are the best way to propose changes to the codebase.
18 | We actively welcome your pull requests:
19 |
20 | 1. Fork the repo and create your branch from `master`.
21 | 2. If you've added code that should be tested, add tests.
22 | 3. If you've changed APIs, update the documentation.
23 | 4. Ensure the test suite passes.
24 | 5. Make sure your code lints.
25 | 6. Issue that pull request!
26 |
27 | ## Any contributions be under the [MIT Software License](http://choosealicense.com/licenses/mit/)
28 | In short, when you submit code changes, your submissions are understood to be
29 | under the same MIT License that covers the project.
30 | Feel free to contact the maintainers if that's a concern.
31 |
32 | ## Report bugs using Github's [issues](https://github.com/InsightLab/PyMove/issues)
33 | We use GitHub issues to track public bugs.
34 | Report a bug by [opening a new issue](https://github.com/InsightLab/PyMove/issues/new).
35 | It's that easy!
36 |
37 | ## Write bug reports with detail, background, and sample code
38 | [This is an example of a bug report.](http://stackoverflow.com/q/12488905/180626).
39 |
40 | **Great Bug Reports** tend to have:
41 |
42 | - A quick summary and/or background
43 |
44 | - Steps to reproduce
45 | - Be specific!
46 | - Give sample code if you can.
47 |
48 | - What you expected would happen
49 |
50 | - What actually happens
51 |
52 | - Notes (possibly including why you think this might be happening,
53 | or stuff you tried that didn't work)
54 |
55 | People *love* thorough bug reports. I'm not even kidding.
56 |
57 | ## Use a Consistent Coding Style
58 | We use `flake8` and `codacy` to standardize our code.
59 | Some styles you shoud follow.
60 | - 4 spaces for indentation rather than tabs
61 | - Use single quotes for strings
62 | - Try to keep your lines for up to 80 characters
63 | - Follow the instructions in our [code style](.code-style.md) file.
64 |
65 | ## Testing
66 | It is important to know if our code is behaving like expected,
67 | so it is important to run and develop tests for new implementations
68 | - Follow the instructions in our [testing](.testing.md) file.
69 |
70 | ## Documenting
71 | To enable automatic documentation we use sphinx,
72 | follwing the Numpy Docstring style.
73 | When creating new modules these need to be added to the docs.
74 | - Follow the instructions in our [documentation](.documentation.md) file.
75 |
76 | ## References
77 | This document was adapted from ,
78 | who follows the open-source contribution guidelines for [Facebook's Draft](https://github.com/facebook/draft-js/blob/main/CONTRIBUTING.md)
79 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Insight Data Science Lab
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements.txt
2 | include LICENSE
3 | include README.md
4 | include .mapping.png
5 | include pymove/tests/baseline/*.png
6 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | help:
2 | @echo "available commands"
3 | @echo " - dev : install dev environment"
4 | @echo " - clean : clean temporary folders and files"
5 | @echo " - test : runs all unit tests"
6 | @echo " - lint : checks code style"
7 | @echo " - docs : creates documentation in html"
8 |
9 | dev:
10 | pip install -r requirements-dev.txt
11 | pip install -e .
12 | pre-commit install
13 |
14 | clean:
15 | rm -rf `find . -type d -name .pytest_cache`
16 | rm -rf `find . -type d -name .mypy_cache`
17 | rm -rf `find . -type d -name __pycache__`
18 | rm -rf `find . -type d -name .ipynb_checkpoints`
19 | rm -rf docs/_build
20 | rm -f .coverage
21 |
22 | test: clean
23 | pytest pymove
24 |
25 | coverage: clean
26 | coverage run -m pytest pymove
27 | coverage report
28 |
29 | lint: clean
30 | flake8 pymove
31 | mypy pymove
32 |
33 | docs: clean
34 | cp docs/examples/notebooks.rst docs
35 | rm -rf docs/api docs/examples
36 | sphinx-apidoc -f -o docs/api pymove pymove/tests/
37 | jupyter nbconvert --to rst --output-dir docs/examples notebooks/[0-9]*.ipynb
38 | mv docs/notebooks.rst docs/examples
39 | make -C docs html
40 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/api/modules.rst:
--------------------------------------------------------------------------------
1 | pymove
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | pymove
8 |
--------------------------------------------------------------------------------
/docs/api/pymove.core.rst:
--------------------------------------------------------------------------------
1 | pymove.core package
2 | ===================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.core.dask module
8 | -----------------------
9 |
10 | .. automodule:: pymove.core.dask
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.core.dataframe module
16 | ----------------------------
17 |
18 | .. automodule:: pymove.core.dataframe
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | pymove.core.grid module
24 | -----------------------
25 |
26 | .. automodule:: pymove.core.grid
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | pymove.core.interface module
32 | ----------------------------
33 |
34 | .. automodule:: pymove.core.interface
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | pymove.core.pandas module
40 | -------------------------
41 |
42 | .. automodule:: pymove.core.pandas
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | pymove.core.pandas\_discrete module
48 | -----------------------------------
49 |
50 | .. automodule:: pymove.core.pandas_discrete
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | Module contents
56 | ---------------
57 |
58 | .. automodule:: pymove.core
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
--------------------------------------------------------------------------------
/docs/api/pymove.models.pattern_mining.rst:
--------------------------------------------------------------------------------
1 | pymove.models.pattern\_mining package
2 | =====================================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.models.pattern\_mining.clustering module
8 | -----------------------------------------------
9 |
10 | .. automodule:: pymove.models.pattern_mining.clustering
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.models.pattern\_mining.freq\_seq\_patterns module
16 | --------------------------------------------------------
17 |
18 | .. automodule:: pymove.models.pattern_mining.freq_seq_patterns
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | pymove.models.pattern\_mining.moving\_together\_patterns module
24 | ---------------------------------------------------------------
25 |
26 | .. automodule:: pymove.models.pattern_mining.moving_together_patterns
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | pymove.models.pattern\_mining.periodic\_patterns module
32 | -------------------------------------------------------
33 |
34 | .. automodule:: pymove.models.pattern_mining.periodic_patterns
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | Module contents
40 | ---------------
41 |
42 | .. automodule:: pymove.models.pattern_mining
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
--------------------------------------------------------------------------------
/docs/api/pymove.models.rst:
--------------------------------------------------------------------------------
1 | pymove.models package
2 | =====================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | pymove.models.pattern_mining
11 |
12 | Submodules
13 | ----------
14 |
15 | pymove.models.anomaly\_detection module
16 | ---------------------------------------
17 |
18 | .. automodule:: pymove.models.anomaly_detection
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | pymove.models.classification module
24 | -----------------------------------
25 |
26 | .. automodule:: pymove.models.classification
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | Module contents
32 | ---------------
33 |
34 | .. automodule:: pymove.models
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
--------------------------------------------------------------------------------
/docs/api/pymove.preprocessing.rst:
--------------------------------------------------------------------------------
1 | pymove.preprocessing package
2 | ============================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.preprocessing.compression module
8 | ---------------------------------------
9 |
10 | .. automodule:: pymove.preprocessing.compression
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.preprocessing.filters module
16 | -----------------------------------
17 |
18 | .. automodule:: pymove.preprocessing.filters
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | pymove.preprocessing.segmentation module
24 | ----------------------------------------
25 |
26 | .. automodule:: pymove.preprocessing.segmentation
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | pymove.preprocessing.stay\_point\_detection module
32 | --------------------------------------------------
33 |
34 | .. automodule:: pymove.preprocessing.stay_point_detection
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | Module contents
40 | ---------------
41 |
42 | .. automodule:: pymove.preprocessing
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
--------------------------------------------------------------------------------
/docs/api/pymove.query.rst:
--------------------------------------------------------------------------------
1 | pymove.query package
2 | ====================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.query.query module
8 | -------------------------
9 |
10 | .. automodule:: pymove.query.query
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | Module contents
16 | ---------------
17 |
18 | .. automodule:: pymove.query
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
--------------------------------------------------------------------------------
/docs/api/pymove.rst:
--------------------------------------------------------------------------------
1 | pymove package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | pymove.core
11 | pymove.models
12 | pymove.preprocessing
13 | pymove.query
14 | pymove.semantic
15 | pymove.uncertainty
16 | pymove.utils
17 | pymove.visualization
18 |
19 | Module contents
20 | ---------------
21 |
22 | .. automodule:: pymove
23 | :members:
24 | :undoc-members:
25 | :show-inheritance:
26 |
--------------------------------------------------------------------------------
/docs/api/pymove.semantic.rst:
--------------------------------------------------------------------------------
1 | pymove.semantic package
2 | =======================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.semantic.semantic module
8 | -------------------------------
9 |
10 | .. automodule:: pymove.semantic.semantic
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | Module contents
16 | ---------------
17 |
18 | .. automodule:: pymove.semantic
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
--------------------------------------------------------------------------------
/docs/api/pymove.uncertainty.rst:
--------------------------------------------------------------------------------
1 | pymove.uncertainty package
2 | ==========================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.uncertainty.privacy module
8 | ---------------------------------
9 |
10 | .. automodule:: pymove.uncertainty.privacy
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.uncertainty.reducing module
16 | ----------------------------------
17 |
18 | .. automodule:: pymove.uncertainty.reducing
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | Module contents
24 | ---------------
25 |
26 | .. automodule:: pymove.uncertainty
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
--------------------------------------------------------------------------------
/docs/api/pymove.utils.rst:
--------------------------------------------------------------------------------
1 | pymove.utils package
2 | ====================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.utils.constants module
8 | -----------------------------
9 |
10 | .. automodule:: pymove.utils.constants
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.utils.conversions module
16 | -------------------------------
17 |
18 | .. automodule:: pymove.utils.conversions
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | pymove.utils.data\_augmentation module
24 | --------------------------------------
25 |
26 | .. automodule:: pymove.utils.data_augmentation
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | pymove.utils.datetime module
32 | ----------------------------
33 |
34 | .. automodule:: pymove.utils.datetime
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | pymove.utils.distances module
40 | -----------------------------
41 |
42 | .. automodule:: pymove.utils.distances
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | pymove.utils.geoutils module
48 | ----------------------------
49 |
50 | .. automodule:: pymove.utils.geoutils
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | pymove.utils.integration module
56 | -------------------------------
57 |
58 | .. automodule:: pymove.utils.integration
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 | pymove.utils.log module
64 | -----------------------
65 |
66 | .. automodule:: pymove.utils.log
67 | :members:
68 | :undoc-members:
69 | :show-inheritance:
70 |
71 | pymove.utils.math module
72 | ------------------------
73 |
74 | .. automodule:: pymove.utils.math
75 | :members:
76 | :undoc-members:
77 | :show-inheritance:
78 |
79 | pymove.utils.mem module
80 | -----------------------
81 |
82 | .. automodule:: pymove.utils.mem
83 | :members:
84 | :undoc-members:
85 | :show-inheritance:
86 |
87 | pymove.utils.networkx module
88 | ----------------------------
89 |
90 | .. automodule:: pymove.utils.networkx
91 | :members:
92 | :undoc-members:
93 | :show-inheritance:
94 |
95 | pymove.utils.trajectories module
96 | --------------------------------
97 |
98 | .. automodule:: pymove.utils.trajectories
99 | :members:
100 | :undoc-members:
101 | :show-inheritance:
102 |
103 | pymove.utils.visual module
104 | --------------------------
105 |
106 | .. automodule:: pymove.utils.visual
107 | :members:
108 | :undoc-members:
109 | :show-inheritance:
110 |
111 | Module contents
112 | ---------------
113 |
114 | .. automodule:: pymove.utils
115 | :members:
116 | :undoc-members:
117 | :show-inheritance:
118 |
--------------------------------------------------------------------------------
/docs/api/pymove.visualization.rst:
--------------------------------------------------------------------------------
1 | pymove.visualization package
2 | ============================
3 |
4 | Submodules
5 | ----------
6 |
7 | pymove.visualization.folium module
8 | ----------------------------------
9 |
10 | .. automodule:: pymove.visualization.folium
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | pymove.visualization.matplotlib module
16 | --------------------------------------
17 |
18 | .. automodule:: pymove.visualization.matplotlib
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | Module contents
24 | ---------------
25 |
26 | .. automodule:: pymove.visualization
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | """Docs configuration."""
2 | # Configuration file for the Sphinx documentation builder.
3 | #
4 | # This file only contains a selection of the most common options. For a full
5 | # list see the documentation:
6 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
7 |
8 | # -- Path setup --------------------------------------------------------------
9 |
10 | # from datetime import datetime
11 | import os
12 | import sys
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
22 |
23 | master_doc = 'index'
24 |
25 | # -- Project information -----------------------------------------------------
26 |
27 | project = 'PyMove'
28 | copyright = '2020, Insight DataScience Lab'
29 | author = 'Insight DataScience Lab'
30 |
31 | # The full version, including alpha/beta/rc tags
32 | release = '2019'
33 |
34 | # -- General configuration ---------------------------------------------------
35 |
36 | # Add any Sphinx extension module names here, as strings. They can be
37 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
38 | # ones.
39 | extensions = [
40 | 'sphinx.ext.autodoc',
41 | 'sphinx.ext.napoleon',
42 | 'sphinx.ext.coverage',
43 | 'sphinx.ext.viewcode',
44 | 'sphinx.ext.todo',
45 | 'sphinx_rtd_theme'
46 | ]
47 |
48 | # Add any paths that contain templates here, relative to this directory.
49 | templates_path = ['_templates']
50 |
51 | # The language for content autogenerated by Sphinx. Refer to documentation
52 | # for a list of supported languages.
53 | #
54 | # This is also used if you do content translation via gettext catalogs.
55 | # Usually you set "language" from the command line for these cases.
56 | language = 'en'
57 |
58 | # List of patterns, relative to source directory, that match files and
59 | # directories to ignore when looking for source files.
60 | # This pattern also affects html_static_path and html_extra_path.
61 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
62 |
63 |
64 | # -- Options for HTML output -------------------------------------------------
65 |
66 | # The theme to use for HTML and HTML Help pages. See the documentation for
67 | # a list of builtin themes.
68 | #
69 | html_theme = 'sphinx_rtd_theme'
70 |
71 | # Add any paths that contain custom static files (such as style sheets) here,
72 | # relative to this directory. They are copied after the builtin static files,
73 | # so a file named "default.css" will overwrite the builtin "default.css".
74 | html_static_path = ['_static']
75 |
76 |
77 | # -- Extension configuration -------------------------------------------------
78 | napoleon_google_docstring = False
79 | napoleon_numpy_docstring = True
80 |
81 | # -- Options for todo extension ----------------------------------------------
82 |
83 | # If true, `todo` and `todoList` produce output, else they produce nothing.
84 | todo_include_todos = True
85 |
--------------------------------------------------------------------------------
/docs/examples/00_What_is_PyMove.rst:
--------------------------------------------------------------------------------
1 | 00 - What is PyMove?
2 | ====================
3 |
4 | **PyMove** is an open-source Python library that provides operations for
5 | handling trajectory data, ranging from data representation and
6 | preprocessing operations to models and visualization techniques.
7 |
8 | PyMove **proposes**: - A familiar and similar syntax to Pandas; - Clear
9 | documentation; - Extensibility, since you can implement your main data
10 | structure by manipulating other data structures such as Dask DataFrame,
11 | numpy arrays, etc., in addition to adding new modules; - Flexibility, as
12 | the user can switch between different data structures; - Operations for
13 | data preprocessing, pattern mining and data visualization.
14 |
15 | --------------
16 |
17 | Environment settings
18 | --------------------
19 |
20 | 1. Create an environment using **Conda**
21 |
22 | ``conda create -n validacao-pymove python=x.x``
23 |
24 | 2. Activate the environment
25 |
26 | ``conda activate validacao-pymove``
27 |
28 | Using PyMove
29 | ------------
30 |
31 | 1. Clone this repository
32 |
33 | ``git clone https://github.com/InsightLab/PyMove``
34 |
35 | 2. Create the developer branch
36 |
37 | ``git branch developer``
38 |
39 | 3. Switch to the new branch
40 |
41 | ``git checkout developer``
42 |
43 | 4. Pull the branch
44 |
45 | ``git pull origin developer``
46 |
47 | 5. Enter the PyMove folder
48 |
49 | ``cd PyMove``
50 |
51 | 6. Install in developer mode
52 |
53 | ``make dev``
54 |
55 | 7. Now, import PyMove to start using it!
56 |
57 | ``import pymove``
58 |
59 | What can you do with PyMove?
60 | ----------------------------
61 |
62 | With PyMove you can handle trajectory data with operations such as: -
63 | Grid - Preprocessing: including segmentation, compression, noise
64 | filtering, stay point detection and map matching techniques. - Data
65 | Visualization: exploring different techniques and channels to view your
66 | data!
67 |
68 | --------------
69 |
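70 | Quick start
71 | -----------
72 |
73 | A minimal sketch of reading a trajectory file, assuming the
74 | ``geolife_sample.csv`` shipped in the ``notebooks`` folder::
75 |
76 |     import pymove
77 |
78 |     # read_csv builds a MoveDataFrame with lat, lon, datetime and id columns
79 |     move_df = pymove.read_csv('geolife_sample.csv')
80 |     print(move_df.head())
81 |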
--------------------------------------------------------------------------------
/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_11_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_11_0.png
--------------------------------------------------------------------------------
/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_12_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_12_0.png
--------------------------------------------------------------------------------
/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_35_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_35_0.png
--------------------------------------------------------------------------------
/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_38_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/docs/examples/03_Exploring_Visualization_files/03_Exploring_Visualization_38_0.png
--------------------------------------------------------------------------------
/docs/examples/04_Exploring_Grid_files/04_Exploring_Grid_26_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/docs/examples/04_Exploring_Grid_files/04_Exploring_Grid_26_0.png
--------------------------------------------------------------------------------
/docs/examples/notebooks.rst:
--------------------------------------------------------------------------------
1 | Example notebooks
2 | =================
3 |
4 | Notebooks
5 | ---------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | 00_What_is_PyMove
11 | 01_Exploring_MoveDataFrame
12 | 02_Exploring_Preprossessing
13 | 03_Exploring_Visualization
14 | 04_Exploring_Grid
15 | 05_Exploring_Utils
16 | 06_Exploring_Integrations
17 | 07_Exploring_Query
18 | 08_Exploring_Semantic
19 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. pymove documentation master file, created by
2 | sphinx-quickstart on Sat Jul 4 11:07:39 2020.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | ==================================
7 | Welcome to pymove's documentation!
8 | ==================================
9 |
10 | PyMove is a Python library for processing and visualization of trajectories and other spatial-temporal data.
11 |
12 | We will also release wrappers for some useful Java libraries frequently used in the mobility domain.
13 |
14 | Main Features
15 | -------------
16 |
17 | PyMove **proposes**:
18 |
19 | - A familiar and similar syntax to Pandas;
20 | - Clear documentation;
21 | - Extensibility, since you can implement your main data structure by manipulating other data structures such as Dask DataFrame, numpy arrays, etc., in addition to adding new modules;
22 | - Flexibility, as the user can switch between different data structures;
23 | - Operations for data preprocessing, pattern mining and data visualization.
24 |
25 | Installation
26 | ------------
27 |
28 | `Conda `__ installation
29 | ---------------------------------------------------------------
30 |
31 | 1. ``conda install -c conda-forge pymove``
32 |
33 | `Pip `__ installation
34 | ------------------------------------------------------
35 |
36 | 1. ``pip install pymove``
37 |
38 |
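39 | Quick check
40 | -----------
41 |
42 | After installing, a minimal sanity check::
43 |
44 |     import pymove
45 |
46 |     # version string defined in pymove/__init__.py ('3.0.1' in this release)
47 |     print(pymove.__version__)
48 |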
39 | Indices and tables
40 | ==================
41 |
42 | * :ref:`genindex`
43 | * :ref:`modindex`
44 | * :ref:`search`
45 |
46 | .. toctree::
47 | :maxdepth: 4
48 | :caption: References
49 |
50 | api/pymove
51 | examples/notebooks
52 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx
2 | sphinx_rtd_theme
3 |
--------------------------------------------------------------------------------
/notebooks/00_What_is_PyMove.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# 00 - What is PyMove?\n",
8 | "\n",
9 | "**PyMove** is a Python library, open-source, that have operations to handling trajectories data, ranging from data representation, preprocessing operations, models, and visualization techniques.\n",
10 | "\n",
11 | "PyMove **proposes**:\n",
12 | "- A familiar and similar syntax to Pandas;\n",
13 | "- Clear documentation;\n",
14 | "- Extensibility, since you can implement your main data structure by manipulating other data structures such as Dask DataFrame, numpy arrays, etc., in addition to adding new modules;\n",
15 | "- Flexibility, as the user can switch between different data structures;\n",
16 | "- Operations for data preprocessing, pattern mining and data visualization.\n",
17 | "\n",
18 | "---\n",
19 | "\n",
20 | "## Enviroment settings\n",
21 | "\n",
22 | "1. Create an environment using **Conda**\n",
23 | "\n",
24 | "`conda create -n validacao-pymove python=x.x`\n",
25 | "\n",
26 | "2. Activate the environment\n",
27 | "\n",
28 | "`conda activate validacao-pymove`\n",
29 | "\n",
30 | "## Using PyMove\n",
31 | "1. Clone this repository\n",
32 | "\n",
33 | "`git clone https://github.com/InsightLab/PyMove`\n",
34 | "\n",
35 | "2. Make a branch developer\n",
36 | "\n",
37 | "`git branch developer`\n",
38 | "\n",
39 | "3. Switch to a new branch\n",
40 | "\n",
41 | "`git checkout developer`\n",
42 | "\n",
43 | "4. Make a pull of branch\n",
44 | "\n",
45 | "`git pull origin developer`\n",
46 | "\n",
47 | "5. Switch to folder PyMove\n",
48 | "\n",
49 | "`cd PyMove`\n",
50 | "\n",
51 | "6. Install in developer mode\n",
52 | "\n",
53 | "`make dev`\n",
54 | "\n",
55 | "\n",
56 | "7. Now, use this command to use PyMove!\n",
57 | "\n",
58 | "`import pymove`\n",
59 | "\n",
60 | "## What can you do with PyMove?\n",
61 | "\n",
62 | "With Pymove you can handling trajectories data with operations of:\n",
63 | "- Grid\n",
64 | "- Preprocessing: this including segmentation, compression, noise filter, stay point detection and map matching techniques.\n",
65 | "- Data Visualization: exploring differents techniques and channels to view your data!\n",
66 | "\n",
67 | "---"
68 | ]
69 | }
70 | ],
71 | "metadata": {
72 | "kernelspec": {
73 | "display_name": "Python 3",
74 | "language": "python",
75 | "name": "python3"
76 | },
77 | "language_info": {
78 | "codemirror_mode": {
79 | "name": "ipython",
80 | "version": 3
81 | },
82 | "file_extension": ".py",
83 | "mimetype": "text/x-python",
84 | "name": "python",
85 | "nbconvert_exporter": "python",
86 | "pygments_lexer": "ipython3",
87 | "version": "3.8.8"
88 | }
89 | },
90 | "nbformat": 4,
91 | "nbformat_minor": 4
92 | }
93 |
--------------------------------------------------------------------------------
/notebooks/README.md:
--------------------------------------------------------------------------------
1 | # Dataset Examples
2 |
3 | - [GeoLife Trajectory Dataset - GeoLife Data](https://www.microsoft.com/en-us/research/publication/geolife-gps-trajectory-dataset-user-guide/):
4 | A GPS trajectory dataset from the Microsoft Research GeoLife project,
5 | collected by 182 users from April 2007 to August 2012.
6 |
7 | - [T-Drive Taxi Trajectories: T-Drive Data](https://www.microsoft.com/en-us/research/publication/t-drive-trajectory-data-sample/):
8 | Sample of trajectories from the Microsoft Research T-Drive project,
9 | generated by over 10,000 taxi cabs in a week of 2008 in Beijing.
10 |
11 | - [Crimes in Boston](https://www.kaggle.com/AnalyzeBoston/crimes-in-boston):
12 | This is a dataset containing records from the new crime incident report system,
13 | which includes a reduced set of fields focused on capturing the type of
14 | incident as well as when and where it occurred. Records begin on June 14,
15 | 2015 and continue to September 3, 2018. The data is provided by Analyze Boston.
16 |
17 | - [Shared Cars Locations](https://www.kaggle.com/doit-intl/autotel-shared-car-locations):
18 | In order to reduce the number of owned cars, the city of Tel Aviv launched a
19 | shared-car project, called AutoTel. Users of the service are able to reserve
20 | a car using a mobile app, and pay for it by the minute. The project that was
21 | launched in October 2017 attracted over 7500 users, with more than 50% of them
22 | using the service at least once a week. AutoTel can predict the geospatial
23 | availability of cars at given times, and use predictions to modify
24 | their business model. They could, for example, modify prices so that it would
25 | be cheaper to park cars in high demand areas, or plan the maintenance
26 | program so that cars will be collected from high-supply-low-demand areas
27 | and returned to areas of high demand.
28 |
29 | - [GPS Trajectories Data Set](https://archive.ics.uci.edu/ml/datasets/GPS+Trajectories):
30 | The dataset has been fed by an Android app called Go!Track and is composed
31 | of two tables. The first table, go_track_tracks, presents general attributes,
32 | and each instance has one trajectory that is represented by
33 | the table go_track_trackspoints.
34 |
35 | - [Hurricanes and Typhoons](https://www.kaggle.com/noaa/hurricane-database):
36 | The NHC publishes the tropical cyclone historical database in a format
37 | known as HURDAT, short for HURricane DATabase.
38 |
39 | - [Gowalla Dataset](https://drive.google.com/file/d/0BzpKyxX1dqTYRTFVYTd1UG81ZXc/view):
40 | This dataset was collected from Gowalla, a popular location-based social
41 | network, which has more than 600,000 users since November 2010 and was
42 | acquired by Facebook in December 2011. In practice, we used the
43 | Gowalla APIs to collect the user profiles, user friendship,
44 | location profiles, and users’ check-in history made before June 1, 2011.
45 | Finally, we have obtained 36,001,959 check-ins made
46 | by 319,063 users over 2,844,076 locations.
47 |
48 | - [Weeplaces Dataset](https://drive.google.com/file/d/0BzpKyxX1dqTYYzRmUXRZMWloblU/view):
49 | This dataset is collected from Weeplaces, a website that aims to visualize
50 | users’ check-in activities in location-based social networks (LBSN).
51 | It is now integrated with the APIs of other location-based social networking
52 | services, e.g., Facebook Places, Foursquare, and Gowalla. Users can login
53 | Weeplaces using their LBSN accounts and connect with their friends in the
54 | same LBSN who have also used this application. All the crawled data is
55 | originally generated in Foursquare. This dataset contains 7,658,368 check-ins
56 | generated by 15,799 users over 971,309 locations. In the data collection,
57 | we can’t get the original Foursquare IDs of the Weeplaces users.
58 | We can only get their check-in history, their friends who also use Weeplaces,
59 | and other additional information about the locations.
60 |
61 | - [Trucks](http://www.chorochronos.org/Default.aspx?tabid=71&iditem=31):
62 | The Trucks dataset consists of 276 trajectories of 50 trucks
63 | delivering concrete to several construction places around
64 | the Athens metropolitan area in Greece for 33 distinct days.
65 |
--------------------------------------------------------------------------------
/notebooks/extracao_geolife.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 14,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "import glob\n",
12 | "import os.path\n",
13 | "import datetime\n",
14 | "import os\n",
15 | "\n",
16 | "def read_plt(plt_file):\n",
17 | " points = pd.read_csv(plt_file, skiprows=6, header=None,\n",
18 | " parse_dates=[[5, 6]], infer_datetime_format=True)\n",
19 | "\n",
20 | " # for clarity rename columns\n",
21 | " points.rename(inplace=True, columns={0: 'lat', 1: 'lon', 3: 'alt', '5_6': 'time'})\n",
22 | "\n",
23 | " # remove unused columns\n",
24 | " points.drop(inplace=True, columns=[2, 4])\n",
25 | "\n",
26 | " return points\n",
27 | "\n",
28 | "mode_names = ['walk', 'bike', 'bus', 'car', 'subway','train', 'airplane', 'boat', 'run', 'motorcycle', 'taxi']\n",
29 | "mode_ids = {s : i + 1 for i, s in enumerate(mode_names)}\n",
30 | "\n",
31 | "def read_labels(labels_file):\n",
32 | " labels = pd.read_csv(labels_file, skiprows=1, header=None,\n",
33 | " parse_dates=[[0, 1], [2, 3]],\n",
34 | " infer_datetime_format=True, delim_whitespace=True)\n",
35 | "\n",
36 | " # for clarity rename columns\n",
37 | " labels.columns = ['start_time', 'end_time', 'label']\n",
38 | "\n",
39 | " # replace 'label' column with integer encoding\n",
40 | " labels['label'] = [mode_ids[i] for i in labels['label']]\n",
41 | "\n",
42 | " return labels\n",
43 | "\n",
44 | "def apply_labels(points, labels):\n",
45 | " indices = labels['start_time'].searchsorted(points['time'], side='right') - 1\n",
46 | " no_label = (indices < 0) | (points['time'].values >= labels['end_time'].iloc[indices].values)\n",
47 | " points['label'] = labels['label'].iloc[indices].values\n",
48 | " points['label'][no_label] = 0\n",
49 | "\n",
50 | "def read_user(user_folder):\n",
51 | " labels = None\n",
52 | "\n",
53 | " plt_files = glob.glob(os.path.join(user_folder, 'Trajectory', '*.plt'))\n",
54 | " df = pd.concat([read_plt(f) for f in plt_files])\n",
55 | "\n",
56 | " labels_file = os.path.join(user_folder, 'labels.txt')\n",
57 | " if os.path.exists(labels_file):\n",
58 | " labels = read_labels(labels_file)\n",
59 | " apply_labels(df, labels)\n",
60 | " else:\n",
61 | " df['label'] = 0\n",
62 | "\n",
63 | " return df\n",
64 | "\n",
65 | "def read_all_users(folder):\n",
66 | " subfolders = os.listdir(folder)\n",
67 | " dfs = []\n",
68 | " for i, sf in enumerate(subfolders):\n",
69 | " print('[%d/%d] processing user %s' % (i + 1, len(subfolders), sf))\n",
70 | " df = read_user(os.path.join(folder,sf))\n",
71 | " df['user'] = int(sf)\n",
72 | " dfs.append(df)\n",
73 | " return pd.concat(dfs)"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 18,
79 | "metadata": {},
80 | "outputs": [
81 | {
82 | "name": "stdout",
83 | "output_type": "stream",
84 | "text": [
85 | "[1/4] processing user 000\n",
86 | "[2/4] processing user 010\n"
87 | ]
88 | },
89 | {
90 | "name": "stderr",
91 | "output_type": "stream",
92 | "text": [
93 | "C:\\Users\\andre\\AppData\\Local\\Continuum\\miniconda3\\lib\\site-packages\\ipykernel_launcher.py:40: SettingWithCopyWarning: \n",
94 | "A value is trying to be set on a copy of a slice from a DataFrame\n",
95 | "\n",
96 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n"
97 | ]
98 | },
99 | {
100 | "name": "stdout",
101 | "output_type": "stream",
102 | "text": [
103 | "[3/4] processing user 011\n",
104 | "[4/4] processing user 100\n"
105 | ]
106 | }
107 | ],
108 | "source": [
109 | "df = read_all_users('Geolife Trajectories 1.3\\Geolife Trajectories 1.3\\Data')"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": 20,
115 | "metadata": {},
116 | "outputs": [
117 | {
118 | "data": {
119 | "text/html": [
120 | "\n",
121 | "\n",
134 | "
\n",
135 | " \n",
136 | " \n",
137 | " | \n",
138 | " time | \n",
139 | " lat | \n",
140 | " lon | \n",
141 | " alt | \n",
142 | " label | \n",
143 | " user | \n",
144 | "
\n",
145 | " \n",
146 | " \n",
147 | " \n",
148 | " 0 | \n",
149 | " 2008-10-23 02:53:04 | \n",
150 | " 39.984702 | \n",
151 | " 116.318417 | \n",
152 | " 492.0 | \n",
153 | " 0 | \n",
154 | " 0 | \n",
155 | "
\n",
156 | " \n",
157 | " 1 | \n",
158 | " 2008-10-23 02:53:10 | \n",
159 | " 39.984683 | \n",
160 | " 116.318450 | \n",
161 | " 492.0 | \n",
162 | " 0 | \n",
163 | " 0 | \n",
164 | "
\n",
165 | " \n",
166 | " 2 | \n",
167 | " 2008-10-23 02:53:15 | \n",
168 | " 39.984686 | \n",
169 | " 116.318417 | \n",
170 | " 492.0 | \n",
171 | " 0 | \n",
172 | " 0 | \n",
173 | "
\n",
174 | " \n",
175 | " 3 | \n",
176 | " 2008-10-23 02:53:20 | \n",
177 | " 39.984688 | \n",
178 | " 116.318385 | \n",
179 | " 492.0 | \n",
180 | " 0 | \n",
181 | " 0 | \n",
182 | "
\n",
183 | " \n",
184 | " 4 | \n",
185 | " 2008-10-23 02:53:25 | \n",
186 | " 39.984655 | \n",
187 | " 116.318263 | \n",
188 | " 492.0 | \n",
189 | " 0 | \n",
190 | " 0 | \n",
191 | "
\n",
192 | " \n",
193 | "
\n",
194 | "
"
195 | ],
196 | "text/plain": [
197 | " time lat lon alt label user\n",
198 | "0 2008-10-23 02:53:04 39.984702 116.318417 492.0 0 0\n",
199 | "1 2008-10-23 02:53:10 39.984683 116.318450 492.0 0 0\n",
200 | "2 2008-10-23 02:53:15 39.984686 116.318417 492.0 0 0\n",
201 | "3 2008-10-23 02:53:20 39.984688 116.318385 492.0 0 0\n",
202 | "4 2008-10-23 02:53:25 39.984655 116.318263 492.0 0 0"
203 | ]
204 | },
205 | "execution_count": 20,
206 | "metadata": {},
207 | "output_type": "execute_result"
208 | }
209 | ],
210 | "source": [
211 | "df.head()"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 21,
217 | "metadata": {},
218 | "outputs": [],
219 | "source": [
220 | "df.to_csv('geolife.csv')"
221 | ]
222 | },
223 | {
224 | "cell_type": "code",
225 | "execution_count": null,
226 | "metadata": {},
227 | "outputs": [],
228 | "source": []
229 | }
230 | ],
231 | "metadata": {
232 | "kernelspec": {
233 | "display_name": "Python 3",
234 | "language": "python",
235 | "name": "python3"
236 | },
237 | "language_info": {
238 | "codemirror_mode": {
239 | "name": "ipython",
240 | "version": 3
241 | },
242 | "file_extension": ".py",
243 | "mimetype": "text/x-python",
244 | "name": "python",
245 | "nbconvert_exporter": "python",
246 | "pygments_lexer": "ipython3",
247 | "version": "3.7.1"
248 | }
249 | },
250 | "nbformat": 4,
251 | "nbformat_minor": 2
252 | }
253 |
--------------------------------------------------------------------------------
/pymove/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | PyMove.
3 |
4 | Provides processing and visualization of trajectories and other
5 | spatial-temporal data
6 |
7 | """
8 |
9 | from .core import grid
10 | from .core.dask import DaskMoveDataFrame
11 | from .core.dataframe import MoveDataFrame
12 | from .core.grid import Grid
13 | from .core.pandas import PandasMoveDataFrame
14 | from .core.pandas_discrete import PandasDiscreteMoveDataFrame
15 | from .models.pattern_mining import clustering
16 | from .preprocessing import compression, filters, segmentation, stay_point_detection
17 | from .query import query
18 | from .semantic import semantic
19 | from .utils import (
20 | constants,
21 | conversions,
22 | data_augmentation,
23 | datetime,
24 | distances,
25 | geoutils,
26 | integration,
27 | log,
28 | math,
29 | mem,
30 | trajectories,
31 | visual,
32 | )
33 | from .utils.trajectories import read_csv
34 | from .visualization import folium, matplotlib
35 |
36 | __version__ = '3.0.1'
37 |
--------------------------------------------------------------------------------
/pymove/core/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains the core of PyMove.
3 |
4 | MoveDataFrame,
5 | PandasMoveDataFrame,
6 | DaskMoveDataFrame,
7 | PandasDiscreteMoveDataFrame,
8 | Grid
9 |
10 | """
11 |
12 | from .interface import MoveDataFrameAbstractModel
13 |
14 | __all__ = ['MoveDataFrameAbstractModel']
15 |
--------------------------------------------------------------------------------
/pymove/core/dataframe.py:
--------------------------------------------------------------------------------
1 | """MoveDataFrame class."""
2 | from __future__ import annotations
3 |
4 | from typing import TYPE_CHECKING
5 |
6 | from dateutil.parser._parser import ParserError
7 | from pandas.core.frame import DataFrame
8 |
9 | from pymove.utils.constants import (
10 | DATETIME,
11 | LATITUDE,
12 | LONGITUDE,
13 | TRAJ_ID,
14 | TYPE_DASK,
15 | TYPE_PANDAS,
16 | )
17 |
18 | if TYPE_CHECKING:
19 | from pymove.core.dask import DaskMoveDataFrame
20 | from pymove.core.pandas import PandasMoveDataFrame
21 |
22 |
23 | class MoveDataFrame:
24 | """Auxiliary class to check and transform data into Pymove Dataframes."""
25 |
26 | @staticmethod
27 | def __new__( # type: ignore[misc]
28 | self,
29 | data: DataFrame | dict | list,
30 | latitude: str = LATITUDE,
31 | longitude: str = LONGITUDE,
32 | datetime: str = DATETIME,
33 | traj_id: str = TRAJ_ID,
34 | type_: str = TYPE_PANDAS,
35 | n_partitions: int = 1,
36 | ) -> 'PandasMoveDataFrame' | 'DaskMoveDataFrame':
37 | """
38 | Creates the PyMove dataframe, which must contain latitude, longitude and datetime.
39 |
40 | The dataframe can be a pandas or dask dataframe.
41 |
42 | Parameters
43 | ----------
44 | data : DataFrame or PandasMoveDataFrame or dict or list
45 | Input trajectory data.
46 | latitude : str, optional
47 | Represents column name latitude, by default LATITUDE
48 | longitude : str, optional
49 | Represents column name longitude, by default LONGITUDE
50 | datetime : str, optional
51 | Represents column name datetime, by default DATETIME
52 | traj_id : str, optional
53 | Represents column name trajectory id, by default TRAJ_ID
54 | type_ : str, optional
55 |             Type of the dataframe backend, TYPE_PANDAS or TYPE_DASK, by default TYPE_PANDAS
56 | n_partitions : int, optional
57 | Amount of partitions for dask dataframe, by default 1
58 |
59 | Raises
60 | ------
61 | KeyError
62 | If missing one of lat, lon, datetime columns
63 | ValueError, ParserError
64 | If the data types can't be converted.
65 |
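66 |         Example
67 |         -------
68 |         A minimal sketch, assuming the default 'lat', 'lon',
69 |         'datetime' and 'id' column names:
70 |
71 |         MoveDataFrame(
72 |             data={'lat': [39.98], 'lon': [116.31],
73 |                   'datetime': ['2008-10-23 02:53:04'], 'id': [1]}
74 |         )
75 |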
66 | """
67 | if type_ == TYPE_PANDAS:
68 | from pymove.core.pandas import PandasMoveDataFrame
69 | return PandasMoveDataFrame(
70 | data, latitude, longitude, datetime, traj_id
71 | )
72 | if type_ == TYPE_DASK:
73 | from pymove.core.dask import DaskMoveDataFrame
74 | return DaskMoveDataFrame(
75 | data, latitude, longitude, datetime, traj_id, n_partitions
76 | )
77 | raise TypeError(
78 | f'Unknown MoveDataFrame type {type_}, use {TYPE_PANDAS} or {TYPE_DASK}'
79 | )
80 |
81 | @staticmethod
82 | def has_columns(data: DataFrame) -> bool:
83 | """
84 | Checks whether the received dataset has 'lat', 'lon', 'datetime' columns.
85 |
86 | Parameters
87 | ----------
88 | data : DataFrame
89 | Input trajectory data
90 |
91 | Returns
92 | -------
93 | bool
94 | Represents whether or not you have the required columns
95 |
96 | """
97 | cols = data.columns
98 | if LATITUDE in cols and LONGITUDE in cols and DATETIME in cols:
99 | return True
100 | return False
101 |
102 | @staticmethod
103 | def validate_move_data_frame(data: DataFrame):
104 | """
105 | Converts the column type to the default type used by PyMove lib.
106 |
107 | Parameters
108 | ----------
109 | data : DataFrame
110 | Input trajectory data
111 |
112 | Raises
113 | ------
114 | KeyError
115 | If missing one of lat, lon, datetime columns
116 | ValueError, ParserError
117 | If the data types can't be converted
118 |
119 | """
120 | try:
121 | if data.dtypes[LATITUDE] != 'float64':
122 | data[LATITUDE] = data[LATITUDE].astype('float64')
123 | if data.dtypes[LONGITUDE] != 'float64':
124 | data[LONGITUDE] = data[LONGITUDE].astype('float64')
125 | if data.dtypes[DATETIME] != 'datetime64[ns]':
126 | data[DATETIME] = data[DATETIME].astype('datetime64[ns]')
127 | except KeyError:
128 | raise KeyError('dataframe missing one of lat, lon, datetime columns.')
129 | except ParserError:
130 | raise ParserError('datetime column cannot be parsed')
131 | except ValueError:
132 | raise ValueError('dtypes cannot be converted.')
133 |
134 | @staticmethod
135 | def format_labels(
136 | current_id: str, current_lat: str, current_lon: str, current_datetime: str
137 | ) -> dict:
138 | """
139 |         Format the labels for the PyMove lib pattern: id, lat, lon and datetime.
140 |
141 | Parameters
142 | ----------
143 | current_id : str
144 | Represents the column name of feature id
145 | current_lat : str
146 | Represents the column name of feature latitude
147 | current_lon : str
148 | Represents the column name of feature longitude
149 | current_datetime : str
150 | Represents the column name of feature datetime
151 |
152 | Returns
153 | -------
154 | Dict
155 | Represents a dict with mapping current columns of data
156 | to format of PyMove column.
157 |
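158 |         Example
159 |         -------
160 |         A minimal sketch, assuming the default constants
161 |         'id', 'lat', 'lon' and 'datetime':
162 |
163 |         MoveDataFrame.format_labels('vid', 'latitude', 'longitude', 'time')
164 |         # {'vid': 'id', 'longitude': 'lon', 'latitude': 'lat', 'time': 'datetime'}
165 |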
158 | """
159 | return {
160 | current_id: TRAJ_ID,
161 | current_lon: LONGITUDE,
162 | current_lat: LATITUDE,
163 | current_datetime: DATETIME
164 | }
165 |
--------------------------------------------------------------------------------
/pymove/core/interface.py:
--------------------------------------------------------------------------------
1 | import abc
2 |
3 |
4 | class MoveDataFrameAbstractModel(abc.ABC):
5 | @abc.abstractmethod
6 | def lat(self):
7 | pass
8 |
9 | @abc.abstractmethod
10 | def lng(self):
11 | pass
12 |
13 | @abc.abstractmethod
14 | def datetime(self):
15 | pass
16 |
17 | @abc.abstractmethod
18 | def loc(self):
19 | pass
20 |
21 | @abc.abstractmethod
22 | def iloc(self):
23 | pass
24 |
25 | @abc.abstractmethod
26 | def at(self):
27 | pass
28 |
29 | @abc.abstractmethod
30 | def values(self):
31 | pass
32 |
33 | @abc.abstractmethod
34 | def columns(self):
35 | pass
36 |
37 | @abc.abstractmethod
38 | def index(self):
39 | pass
40 |
41 | @abc.abstractmethod
42 | def dtypes(self):
43 | pass
44 |
45 | @abc.abstractmethod
46 | def shape(self):
47 | pass
48 |
49 | @abc.abstractmethod
50 | def rename(self):
51 | pass
52 |
53 | @abc.abstractmethod
54 | def len(self):
55 | pass
56 |
57 | @abc.abstractmethod
58 | def head(self):
59 | pass
60 |
61 | @abc.abstractmethod
62 | def tail(self):
63 | pass
64 |
65 | @abc.abstractmethod
66 | def get_users_number(self):
67 | pass
68 |
69 | @abc.abstractmethod
70 | def to_numpy(self):
71 | pass
72 |
73 | @abc.abstractmethod
74 | def to_dict(self):
75 | pass
76 |
77 | @abc.abstractmethod
78 | def to_grid(self):
79 | pass
80 |
81 | @abc.abstractmethod
82 | def to_data_frame(self):
83 | pass
84 |
85 | @abc.abstractmethod
86 | def info(self):
87 | pass
88 |
89 | @abc.abstractmethod
90 | def describe(self):
91 | pass
92 |
93 | @abc.abstractmethod
94 | def memory_usage(self):
95 | pass
96 |
97 | @abc.abstractmethod
98 | def copy(self):
99 | pass
100 |
101 | @abc.abstractmethod
102 | def generate_tid_based_on_id_datetime(self):
103 | pass
104 |
105 | @abc.abstractmethod
106 | def generate_date_features(self):
107 | pass
108 |
109 | @abc.abstractmethod
110 | def generate_hour_features(self):
111 | pass
112 |
113 | @abc.abstractmethod
114 | def generate_day_of_the_week_features(self):
115 | pass
116 |
117 | @abc.abstractmethod
118 | def generate_weekend_features(self):
119 | pass
120 |
121 | @abc.abstractmethod
122 | def generate_time_of_day_features(self):
123 | pass
124 |
125 | @abc.abstractmethod
126 | def generate_datetime_in_format_cyclical(self):
127 | pass
128 |
129 | @abc.abstractmethod
130 | def generate_dist_time_speed_features(self):
131 | pass
132 |
133 | @abc.abstractmethod
134 | def generate_dist_features(self):
135 | pass
136 |
137 | @abc.abstractmethod
138 | def generate_time_features(self):
139 | pass
140 |
141 | @abc.abstractmethod
142 | def generate_speed_features(self):
143 | pass
144 |
145 | @abc.abstractmethod
146 | def generate_move_and_stop_by_radius(self):
147 | pass
148 |
149 | @abc.abstractmethod
150 | def time_interval(self):
151 | pass
152 |
153 | @abc.abstractmethod
154 | def get_bbox(self):
155 | pass
156 |
157 | @abc.abstractmethod
158 | def plot_all_features(self):
159 | pass
160 |
161 | @abc.abstractmethod
162 | def plot_trajs(self):
163 | pass
164 |
165 | @abc.abstractmethod
166 | def plot_traj_id(self):
167 | pass
168 |
169 | @abc.abstractmethod
170 | def show_trajectories_info(self):
171 | pass
172 |
173 | @abc.abstractmethod
174 | def min(self):
175 | pass
176 |
177 | @abc.abstractmethod
178 | def max(self):
179 | pass
180 |
181 | @abc.abstractmethod
182 | def count(self):
183 | pass
184 |
185 | @abc.abstractmethod
186 | def groupby(self):
187 | pass
188 |
189 | @abc.abstractmethod
190 | def plot(self):
191 | pass
192 |
193 | @abc.abstractmethod
194 | def select_dtypes(self):
195 | pass
196 |
197 | @abc.abstractmethod
198 | def astype(self):
199 | pass
200 |
201 | @abc.abstractmethod
202 | def sort_values(self):
203 | pass
204 |
205 | @abc.abstractmethod
206 | def reset_index(self):
207 | pass
208 |
209 | @abc.abstractmethod
210 | def set_index(self):
211 | pass
212 |
213 | @abc.abstractmethod
214 | def drop(self):
215 | pass
216 |
217 | @abc.abstractmethod
218 | def duplicated(self):
219 | pass
220 |
221 | @abc.abstractmethod
222 | def drop_duplicates(self):
223 | pass
224 |
225 | @abc.abstractmethod
226 | def shift(self):
227 | pass
228 |
229 | @abc.abstractmethod
230 | def all(self):
231 | pass
232 |
233 | @abc.abstractmethod
234 | def any(self):
235 | pass
236 |
237 | @abc.abstractmethod
238 | def isna(self):
239 | pass
240 |
241 | @abc.abstractmethod
242 | def fillna(self):
243 | pass
244 |
245 | @abc.abstractmethod
246 | def dropna(self):
247 | pass
248 |
249 | @abc.abstractmethod
250 | def sample(self):
251 | pass
252 |
253 | @abc.abstractmethod
254 | def isin(self):
255 | pass
256 |
257 | @abc.abstractmethod
258 | def append(self):
259 | pass
260 |
261 | @abc.abstractmethod
262 | def join(self):
263 | pass
264 |
265 | @abc.abstractmethod
266 | def merge(self):
267 | pass
268 |
269 | @abc.abstractmethod
270 | def nunique(self):
271 | pass
272 |
273 | @abc.abstractmethod
274 | def to_csv(self):
275 | pass
276 |
277 | @abc.abstractmethod
278 | def write_file(self):
279 | pass
280 |
281 | @abc.abstractmethod
282 | def convert_to(self, new_type: str):
283 | pass
284 |
285 | @abc.abstractmethod
286 | def get_type(self):
287 | pass
288 |
--------------------------------------------------------------------------------
/pymove/models/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains models to perform operations on trajectories.
3 |
4 | pattern_mining,
5 | anomaly_detection,
6 | classification
7 |
8 | """
9 |
--------------------------------------------------------------------------------
/pymove/models/anomaly_detection.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/models/classification.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/models/pattern_mining/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains models to detect patterns on trajectories.
3 |
4 | clustering,
5 | freq_seq_patterns,
6 | moving_together_patterns,
7 | periodic_patterns
8 |
9 | """
10 |
--------------------------------------------------------------------------------
/pymove/models/pattern_mining/clustering.py:
--------------------------------------------------------------------------------
1 | """
2 | Clustering operations.
3 |
4 | elbow_method,
5 | gap_statistic,
6 | dbscan_clustering
7 |
8 | """
9 | from __future__ import annotations
10 |
11 | from typing import Callable
12 |
13 | import numpy as np
14 | from pandas import DataFrame
15 | from sklearn.cluster import DBSCAN, KMeans
16 |
17 | from pymove.utils.constants import EARTH_RADIUS, LATITUDE, LONGITUDE, N_CLUSTER
18 | from pymove.utils.conversions import meters_to_eps
19 | from pymove.utils.log import logger, progress_bar, timer_decorator
20 |
21 |
22 | @timer_decorator
23 | def elbow_method(
24 | move_data: DataFrame,
25 | k_initial: int = 1,
26 | max_clusters: int = 15,
27 | k_iteration: int = 1,
28 | random_state: int | None = None
29 | ) -> dict:
30 | """
31 | Determines the optimal number of clusters.
32 |
33 | In the range set by the user using the elbow method.
34 |
35 | Parameters
36 | ----------
37 | move_data : dataframe
38 | The input trajectory data.
39 | k_initial: int, optional
40 |         The initial value used in the iteration of the elbow method.
41 |         Represents the minimum number of clusters, by default 1
42 |     max_clusters: int, optional
43 |         The maximum value used in the iteration of the elbow method.
44 | Maximum number of clusters to test for, by default 15
45 | k_iteration: int, optional
46 | Increment value of the sequence used by the elbow method, by default 1
47 | random_state: int, RandomState instance
48 | Determines random number generation for centroid initialization.
49 | Use an int to make the randomness deterministic, by default None
50 |
51 | Returns
52 | -------
53 | dict
54 | The inertia values for the different numbers of clusters
55 |
56 | Example
57 | -------
58 | clustering.elbow_method(move_data=move_df, k_iteration=3)
59 | {
60 | 1: 55084.15957839036,
61 | 4: 245.68365592382938,
62 | 7: 92.31472644640075,
63 | 10: 62.618599956870355,
64 | 13: 45.59653757292055,
65 | }
66 |
67 | """
68 | message = 'Executing Elbow Method for {} to {} clusters at {} steps\n'.format(
69 | k_initial, max_clusters, k_iteration
70 | )
71 | logger.debug(message)
72 | inertia_dic = {}
73 | for k in progress_bar(
74 | range(k_initial, max_clusters + 1, k_iteration), desc='Running KMeans'
75 | ):
76 | km = KMeans(n_clusters=k, random_state=random_state)
77 | inertia_dic[k] = km.fit(move_data[[LATITUDE, LONGITUDE]]).inertia_
78 | return inertia_dic
79 |
80 |
81 | @timer_decorator
82 | def gap_statistic(
83 | move_data: DataFrame,
84 | nrefs: int = 3,
85 | k_initial: int = 1,
86 | max_clusters: int = 15,
87 | k_iteration: int = 1,
88 | random_state: int | None = None
89 | ) -> dict:
90 | """
91 |     Calculates the optimal number of clusters using the Gap Statistic.
92 |
93 | From Tibshirani, Walther, Hastie.
94 |
95 | Parameters
96 | ----------
97 | move_data: ndarray of shape (n_samples, n_features).
98 | The input trajectory data.
99 | nrefs: int, optional
100 | number of sample reference datasets to create, by default 3
101 | k_initial: int, optional.
102 |         The initial value used in the iteration of the method, by default 1
103 |         Represents the minimum number of clusters.
104 | max_clusters: int, optional
105 | Maximum number of clusters to test for, by default 15
106 | k_iteration:int, optional
107 | Increment value of the sequence used by the elbow method, by default 1
108 | random_state: int, RandomState instance
109 | Determines random number generation for centroid initialization.
110 | Use an int to make the randomness deterministic, by default None
111 |
112 | Returns
113 | -------
114 | dict
115 | The error value for each cluster number
116 |
117 | Notes
118 | -----
119 | https://anaconda.org/milesgranger/gap-statistic/notebook
120 |
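121 |     Example
122 |     -------
123 |     A hypothetical call, mirroring the elbow_method example above:
124 |
125 |     clustering.gap_statistic(move_data=move_df, k_iteration=3)
126 |     # returns a dict mapping each tested number of clusters to its gap value
127 |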
121 | """
122 | message = 'Executing Gap Statistic for {} to {} clusters at {} steps\n'.format(
123 | k_initial, max_clusters, k_iteration
124 | )
125 | logger.debug(message)
126 | gaps = {}
127 | np.random.seed(random_state)
128 | for k in progress_bar(
129 | range(k_initial, max_clusters + 1, k_iteration), desc='Running KMeans'
130 | ):
131 | # Holder for reference dispersion results
132 | ref_disps = np.zeros(nrefs)
133 | # For n references, generate random sample and perform kmeans
134 | # getting resulting dispersion of each loop
135 | for i in range(nrefs):
136 | # Create new random reference set
137 | random_reference = np.random.random_sample(size=move_data.shape)
138 | # Fit to it
139 | km = KMeans(n_clusters=k, random_state=random_state)
140 | ref_disps[i] = km.fit(random_reference).inertia_
141 | # Fit cluster to original data and create dispersion
142 | km = KMeans(k).fit(move_data[[LATITUDE, LONGITUDE]])
143 | orig_disp = km.inertia_
144 | # Calculate gap statistic
145 | gap = np.log(np.mean(ref_disps)) - np.log(orig_disp)
146 | # Assign this loop gap statistic to gaps
147 | gaps[k] = gap
148 |
149 | return gaps
150 |
151 |
152 | @timer_decorator
153 | def dbscan_clustering(
154 | move_data: DataFrame,
155 | cluster_by: str,
156 | meters: int = 10,
157 | min_sample: float = 1680 / 2,
158 | earth_radius: float = EARTH_RADIUS,
159 | metric: str | Callable = 'euclidean',
160 | inplace: bool = False
161 | ) -> DataFrame | None:
162 | """
163 | Performs density based clustering on the move_dataframe according to cluster_by.
164 |
165 | Parameters
166 | ----------
167 | move_data : dataframe
168 | the input trajectory
169 | cluster_by : str
170 |         the column to cluster
171 | meters : int, optional
172 | distance to use in the clustering, by default 10
173 | min_sample : float, optional
174 | the minimum number of samples to consider a cluster, by default 1680/2
175 |     earth_radius : float, optional
176 |         The radius of the earth in meters, by default EARTH_RADIUS
177 | metric: string, or callable, optional
178 | The metric to use when calculating distance between instances in a feature array
179 | by default 'euclidean'
180 | inplace : bool, optional
181 |         If True, the original dataframe is altered, otherwise a copy is returned, by default False
182 |
183 | Returns
184 | -------
185 | DataFrame
186 | Clustered dataframe or None
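187 |
188 |     Example
189 |     -------
190 |     A hypothetical call, assuming move_df has an 'id' column:
191 |
192 |     clustering.dbscan_clustering(move_data=move_df, cluster_by='id', meters=50)
193 |     # adds an N_CLUSTER column holding each point's cluster label
194 |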
187 | """
188 | if not inplace:
189 | move_data = move_data[:]
190 | move_data.reset_index(drop=True, inplace=True)
191 |
192 | move_data[N_CLUSTER] = -1
193 |
194 | for cluster_id in progress_bar(move_data[cluster_by].unique(), desc='Clustering'):
195 |
196 | df_filter = move_data[move_data[cluster_by] == cluster_id]
197 |
198 | dbscan = DBSCAN(
199 | eps=meters_to_eps(meters, earth_radius),
200 | min_samples=min_sample,
201 | metric=metric
202 | )
203 | dbscan_result = dbscan.fit(df_filter[[LATITUDE, LONGITUDE]].to_numpy())
204 |
205 | idx = df_filter.index
206 | res = dbscan_result.labels_ + move_data[N_CLUSTER].max() + 1
207 | move_data.at[idx, N_CLUSTER] = res
208 |
209 | if not inplace:
210 | return move_data
211 |
--------------------------------------------------------------------------------
/pymove/models/pattern_mining/freq_seq_patterns.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/models/pattern_mining/moving_together_patterns.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/models/pattern_mining/periodic_patterns.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/preprocessing/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains functions to preprocess the dataframes for manipulation.
3 |
4 | compression,
5 | filters,
6 | segmentation,
7 | stay_point_detection
8 |
9 | """
10 |
--------------------------------------------------------------------------------
/pymove/preprocessing/compression.py:
--------------------------------------------------------------------------------
1 | """
2 | Compression operations.
3 |
4 | compress_segment_stop_to_point
5 |
6 | """
7 | from __future__ import annotations
8 |
9 | import numpy as np
10 | from pandas import DataFrame
11 |
12 | from pymove.preprocessing.stay_point_detection import (
13 | create_or_update_move_stop_by_dist_time,
14 | )
15 | from pymove.utils.constants import (
16 | LAT_MEAN,
17 | LATITUDE,
18 | LON_MEAN,
19 | LONGITUDE,
20 | SEGMENT_STOP,
21 | STOP,
22 | TRAJ_ID,
23 | )
24 | from pymove.utils.log import logger, progress_bar, timer_decorator
25 |
26 |
27 | @timer_decorator
28 | def compress_segment_stop_to_point(
29 | move_data: DataFrame,
30 | label_segment: str = SEGMENT_STOP,
31 | label_stop: str = STOP,
32 | point_mean: str = 'default',
33 | drop_moves: bool = False,
34 | label_id: str = TRAJ_ID,
35 | dist_radius: float = 30,
36 | time_radius: float = 900,
37 | inplace: bool = False,
38 | ) -> DataFrame:
39 | """
40 | Compress the trajectories using the stop points in the dataframe.
41 |
42 |     Compress a segment to a point, setting lat_mean and lon_mean for each segment.
43 |
44 | Parameters
45 | ----------
46 | move_data : dataframe
47 | The input trajectory data
48 | label_segment : String, optional
49 | The label of the column containing the ids of the formed segments.
50 |         Is the new split id, by default SEGMENT_STOP
51 | label_stop : String, optional
52 | Is the name of the column that indicates if a point is a stop, by default STOP
53 | point_mean : String, optional
54 | Indicates whether the mean points should be calculated using
55 | centroids or the point that repeat the most, by default 'default'
56 | drop_moves : Boolean, optional
57 | If set to true, the moving points will be dropped from the dataframe,
58 | by default False
59 | label_id : String, optional
60 | Used to create the stay points used in the compression.
61 | If the dataset already has the stop move, this
62 | parameter should be ignored.
63 | Indicates the label of the id column in the user dataframe, by default TRAJ_ID
64 | dist_radius : Double, optional
65 | Used to create the stay points used in the compression, by default 30
66 | If the dataset already has the stop move, this
67 | parameter should be ignored.
68 | The first step in this function is segmenting the trajectory.
69 | The segments are used to find the stop points.
70 | The dist_radius defines the distance used in the segmentation.
71 | time_radius : Double, optional
72 | Used to create the stay points used in the compression, by default 900
73 | If the dataset already has the stop move, this
74 | parameter should be ignored.
75 | The time_radius used to determine if a segment is a stop.
76 | If the user stayed in the segment for a time
77 |         greater than time_radius, then the segment is a stop.
78 | inplace : boolean, optional
79 | if set to true the original dataframe will be altered to contain
80 | the result of the filtering, otherwise a copy will be returned, by default False
81 |
82 | Returns
83 | -------
84 | DataFrame
85 | Data with 3 additional features: segment_stop, lat_mean and lon_mean or None
86 | segment_stop indicates the trajectory segment to which the point belongs
87 | lat_mean and lon_mean:
88 | if the default option is used, lat_mean and lon_mean are defined
89 |             based on the point that repeats most within the segment
90 |             On the other hand, if the centroid option is used,
91 |             lat_mean and lon_mean are defined by the centroid of
92 |             all the points in the segment
93 |
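94 |     Example
95 |     -------
96 |     A hypothetical call, compressing stops to their segment centroids:
97 |
98 |     compression.compress_segment_stop_to_point(
99 |         move_df, point_mean='centroid', drop_moves=True
100 |     )
101 |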
94 | """
95 | if not inplace:
96 | move_data = move_data.copy()
97 |
98 | if (label_segment not in move_data) & (label_stop not in move_data):
99 | create_or_update_move_stop_by_dist_time(
100 | move_data, dist_radius, time_radius, label_id, inplace=True
101 | )
102 |
103 | logger.debug('...setting mean to lat and lon...')
104 | lat_mean = np.full(move_data.shape[0], -1.0, dtype=np.float64)
105 | lon_mean = np.full(move_data.shape[0], -1.0, dtype=np.float64)
106 |
107 | if drop_moves is False:
108 | lat_mean[move_data[~move_data[label_stop]].index] = np.NaN
109 | lon_mean[move_data[~move_data[label_stop]].index] = np.NaN
110 | else:
111 | logger.debug('...move segments will be dropped...')
112 |
113 | logger.debug('...get only segments stop...')
114 | segments = move_data[move_data[label_stop]][label_segment].unique()
115 |
116 | for idx in progress_bar(
117 | segments, desc=f'Generating {label_segment} and {label_stop}'
118 | ):
119 | filter_ = move_data[label_segment] == idx
120 |
121 | size_id = move_data[filter_].shape[0]
122 |         # only compress segments that contain more than one point
123 | if size_id > 1:
124 | # get first and last point of each stop segment
125 | ind_start = move_data[filter_].iloc[[0]].index
126 | ind_end = move_data[filter_].iloc[[-1]].index
127 |
128 | if point_mean == 'default':
129 | p = (
130 | move_data[filter_]
131 | .groupby([LATITUDE, LONGITUDE], as_index=False)
132 | .agg({'id': 'count'})
133 | .sort_values(['id'])
134 | .tail(1)
135 | )
136 | lat_mean[ind_start] = p.iloc[0, 0]
137 | lon_mean[ind_start] = p.iloc[0, 1]
138 | lat_mean[ind_end] = p.iloc[0, 0]
139 | lon_mean[ind_end] = p.iloc[0, 1]
140 |
141 | elif point_mean == 'centroid':
142 | # set lat and lon mean to first_point
143 | # and last points to each segment
144 | lat_mean[ind_start] = move_data.loc[filter_][LATITUDE].mean()
145 | lon_mean[ind_start] = move_data.loc[filter_][LONGITUDE].mean()
146 | lat_mean[ind_end] = move_data.loc[filter_][LATITUDE].mean()
147 | lon_mean[ind_end] = move_data.loc[filter_][LONGITUDE].mean()
148 | else:
149 | logger.debug(f'There are segments with only one point: {idx}')
150 |
151 | move_data[LAT_MEAN] = lat_mean
152 | move_data[LON_MEAN] = lon_mean
153 | del lat_mean
154 | del lon_mean
155 |
156 | shape_before = move_data.shape[0]
157 | # filter points to drop
158 | filter_drop = (
159 | (move_data[LAT_MEAN] == -1.0)
160 | & (move_data[LON_MEAN] == -1.0)
161 | )
162 | shape_drop = move_data[filter_drop].shape[0]
163 |
164 | if shape_drop > 0:
165 | logger.debug('...Dropping %s points...' % shape_drop)
166 | move_data.drop(move_data[filter_drop].index, inplace=True)
167 |
168 | logger.debug(
169 | '...Shape_before: %s\n...Current shape: %s'
170 | % (shape_before, move_data.shape[0])
171 | )
172 |
173 | if not inplace:
174 | return move_data
175 |
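Usage sketch for compress_segment_stop_to_point (hypothetical; the geolife_sample.csv file name and its lat/lon/datetime/id column layout are assumptions, not part of this module):

    # Hypothetical usage sketch for compress_segment_stop_to_point.
    from pymove import read_csv
    from pymove.preprocessing.compression import compress_segment_stop_to_point

    # assumes a CSV with lat, lon, datetime and id columns
    move_df = read_csv('geolife_sample.csv')

    # segments each trajectory, flags stops (> 900 s within a 30 m radius)
    # and sets lat_mean/lon_mean to the most repeated point of each stop
    compressed = compress_segment_stop_to_point(move_df, drop_moves=True)
    print(compressed[['lat_mean', 'lon_mean']].head())

With drop_moves=True only the endpoints of stop segments survive, so the result is a much smaller frame that still preserves where the object stayed.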
--------------------------------------------------------------------------------
/pymove/preprocessing/stay_point_detection.py:
--------------------------------------------------------------------------------
1 | """
2 | Stop point detection operations.
3 |
4 | create_or_update_move_stop_by_dist_time,
5 | create_or_update_move_and_stop_by_radius
6 |
7 | """
8 | from __future__ import annotations
9 |
10 | from typing import TYPE_CHECKING
11 |
12 | import numpy as np
13 |
14 | from pymove.preprocessing.segmentation import by_max_dist
15 | from pymove.utils.constants import (
16 | DIST_TO_PREV,
17 | MOVE,
18 | SEGMENT_STOP,
19 | SITUATION,
20 | STOP,
21 | TIME_TO_PREV,
22 | TRAJ_ID,
23 | )
24 | from pymove.utils.log import logger, timer_decorator
25 |
26 | if TYPE_CHECKING:
27 | from pymove.core.dask import DaskMoveDataFrame
28 | from pymove.core.pandas import PandasMoveDataFrame
29 |
30 |
31 | @timer_decorator
32 | def create_or_update_move_stop_by_dist_time(
33 | move_data: 'PandasMoveDataFrame' | 'DaskMoveDataFrame',
34 | dist_radius: float = 30,
35 | time_radius: float = 900,
36 | label_id: str = TRAJ_ID,
37 | new_label: str = SEGMENT_STOP,
38 | inplace: bool = False
39 | ) -> 'PandasMoveDataFrame' | 'DaskMoveDataFrame' | None:
40 | """
41 | Determines the stops and moves points of the dataframe.
42 |
43 | If these points already exist, they will be updated.
44 |
45 | Parameters
46 | ----------
47 | move_data : dataframe
48 | The input trajectory data
49 | dist_radius : float, optional
50 | The first step in this function is segmenting the trajectory
51 | The segments are used to find the stop points
52 | The dist_radius defines the distance used in the segmentation,
53 | by default 30
54 | time_radius : float, optional
55 | The time_radius used to determine if a segment is a stop
56 | If the user stayed in the segment for a time
57 |         greater than time_radius, then the segment is a stop,
58 | by default 900
59 | label_id : str, optional
60 | Indicates the label of the id column in the user dataframe, by default TRAJ_ID
61 |     new_label : str, optional
62 |         The name of the column that will hold the ids of the created segments,
63 |         by default SEGMENT_STOP
64 | inplace : bool, optional
65 | if set to true the original dataframe will be altered to
66 |         contain the result of the operation, otherwise a copy will be returned,
67 | by default False
68 |
69 | Returns
70 | -------
71 | DataFrame
72 |         DataFrame with 2 additional features: segment_stop and stop.
73 | segment_stop indicates the trajectory segment to which the point belongs
74 | stop indicates if the point represents a stop.
75 |
76 | """
77 | if not inplace:
78 | move_data = move_data.copy()
79 |
80 | by_max_dist(
81 | move_data,
82 | label_id=label_id,
83 | max_dist_between_adj_points=dist_radius,
84 | label_new_tid=new_label,
85 | inplace=True
86 | )
87 |
88 | move_data.generate_dist_time_speed_features(
89 | label_id=new_label
90 | )
91 |
92 | logger.debug('Create or update stop as True or False')
93 | logger.debug(
94 | '...Creating stop features as True or False using %s to time in seconds'
95 | % time_radius
96 | )
97 | move_data[STOP] = False
98 |     move_data_agg_tid = (
99 | move_data.groupby(by=new_label)
100 | .agg({TIME_TO_PREV: 'sum'})
101 | .query(f'{TIME_TO_PREV} > {time_radius}')
102 | .index
103 | )
104 | idx = move_data[
105 |         move_data[new_label].isin(move_data_agg_tid)
106 | ].index
107 | move_data.at[idx, STOP] = True
108 | logger.debug(move_data[STOP].value_counts())
109 |
110 | if not inplace:
111 | return move_data
112 |
113 |
114 | @timer_decorator
115 | def create_or_update_move_and_stop_by_radius(
116 | move_data: 'PandasMoveDataFrame' | 'DaskMoveDataFrame',
117 | radius: float = 0,
118 | target_label: str = DIST_TO_PREV,
119 | new_label: str = SITUATION,
120 | inplace: bool = False,
121 | ) -> 'PandasMoveDataFrame' | 'DaskMoveDataFrame' | None:
122 | """
123 | Finds the stops and moves points of the dataframe.
124 |
125 | If these points already exist, they will be updated.
126 |
127 | Parameters
128 | ----------
129 | move_data : dataframe
130 | The input trajectory data
131 | radius : float, optional
132 |         The radius value is used to determine if a point is a stop.
133 |         If the value of the point in target_label is
134 |         greater than radius, the point is a move, otherwise it's a stop,
135 |         by default 0
136 | target_label : String, optional
137 | The feature used to calculate the stay points, by default DIST_TO_PREV
138 | new_label : String, optional
139 |         The name of the column that indicates if a point is a stop or a move,
140 | by default SITUATION
141 | inplace : bool, optional
142 | if set to true the original dataframe will be altered to
143 |         contain the result of the operation, otherwise a copy will be returned,
144 | by default False
145 |
146 | Returns
147 | -------
148 | DataFrame
149 |         DataFrame with the additional feature new_label
150 |         (the distance features are also generated if missing).
151 |         new_label indicates if the point represents a stop or a move.
152 |
153 | """
154 | logger.debug('\nCreating or updating features MOVE and STOPS...\n')
155 |
156 | if not inplace:
157 | move_data = move_data.copy()
158 |
159 | if DIST_TO_PREV not in move_data:
160 | move_data.generate_dist_features()
161 |
162 | conditions = (
163 | (move_data[target_label] > radius),
164 | (move_data[target_label] <= radius),
165 | )
166 | choices = [MOVE, STOP]
167 |
168 | move_data[new_label] = np.select(conditions, choices, np.nan)
169 | logger.debug(
170 |         '\n....There are %s stops for these parameters\n'
171 | % (move_data[move_data[new_label] == STOP].shape[0])
172 | )
173 |
174 | if not inplace:
175 | return move_data
176 |
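Usage sketch for the two detectors above (hypothetical; the sample file name and the 2 m radius are assumptions):

    # Hypothetical sketch: flag stops by accumulated time, then by distance radius.
    from pymove import read_csv
    from pymove.preprocessing.stay_point_detection import (
        create_or_update_move_and_stop_by_radius,
        create_or_update_move_stop_by_dist_time,
    )

    move_df = read_csv('geolife_sample.csv')

    # segment-based detection: a segment is a stop if the user spent > 900 s in it
    by_time = create_or_update_move_stop_by_dist_time(move_df, dist_radius=30)
    print(by_time['stop'].value_counts())

    # point-based detection: dist_to_prev <= 2 m -> 'stop', otherwise 'move'
    by_radius = create_or_update_move_and_stop_by_radius(move_df, radius=2)
    print(by_radius['situation'].value_counts())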
--------------------------------------------------------------------------------
/pymove/query/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains functions to perform queries on trajectories.
3 |
4 | query
5 |
6 | """
7 |
--------------------------------------------------------------------------------
/pymove/semantic/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains semantic functions that add new information to the trajectories.
3 |
4 | semantic
5 |
6 | """
7 |
--------------------------------------------------------------------------------
/pymove/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/__init__.py
--------------------------------------------------------------------------------
/pymove/tests/baseline/features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/features.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/plot_bounds.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/plot_bounds.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/plot_coords.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/plot_coords.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/plot_line.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/plot_line.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/shot_points_by_date.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/shot_points_by_date.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/shot_points_by_date_line.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/shot_points_by_date_line.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/show_gps_points.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/show_gps_points.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/traj_id.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/traj_id.png
--------------------------------------------------------------------------------
/pymove/tests/baseline/trajectories.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/InsightLab/PyMove/15cccf0697117d3dae6be83c1eb8e8aa12eaba5e/pymove/tests/baseline/trajectories.png
--------------------------------------------------------------------------------
/pymove/tests/test_base_init.py:
--------------------------------------------------------------------------------
1 | try:
2 | from pymove import * # noqa
3 |
4 | _top_import_error = None
5 | except Exception as e:
6 | _top_import_error = e
7 |
8 |
9 | def test_import_skl():
10 | assert _top_import_error is None
11 |
--------------------------------------------------------------------------------
/pymove/tests/test_core_dask.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from dask.dataframe import DataFrame as DaskDataFrame
4 | from dask.dataframe import from_pandas
5 | from pandas import DataFrame, Timestamp
6 |
7 | from pymove import DaskMoveDataFrame, MoveDataFrame, PandasMoveDataFrame, read_csv
8 | from pymove.utils.constants import (
9 | DATETIME,
10 | LATITUDE,
11 | LONGITUDE,
12 | TRAJ_ID,
13 | TYPE_DASK,
14 | TYPE_PANDAS,
15 | )
16 |
17 | list_data = [
18 | [39.984094, 116.319236, '2008-10-23 05:53:05', 1],
19 | [39.984198, 116.319322, '2008-10-23 05:53:06', 1],
20 | [39.984224, 116.319402, '2008-10-23 05:53:11', 2],
21 | [39.984224, 116.319402, '2008-10-23 05:53:11', 2],
22 | ]
23 |
24 | str_data_default = """
25 | lat,lon,datetime,id
26 | 39.984093,116.319236,2008-10-23 05:53:05,4
27 | 39.9842,116.319322,2008-10-23 05:53:06,1
28 | 39.984222,116.319402,2008-10-23 05:53:11,2
29 | 39.984222,116.319402,2008-10-23 05:53:11,2
30 | """
31 |
32 | str_data_different = """
33 | latitude,longitude,time,traj_id
34 | 39.984093,116.319236,2008-10-23 05:53:05,4
35 | 39.9842,116.319322,2008-10-23 05:53:06,1
36 | 39.984222,116.319402,2008-10-23 05:53:11,2
37 | 39.984222,116.319402,2008-10-23 05:53:11,2
38 | """
39 |
40 | str_data_missing = """
41 | 39.984093,116.319236,2008-10-23 05:53:05,4
42 | 39.9842,116.319322,2008-10-23 05:53:06,1
43 | 39.984222,116.319402,2008-10-23 05:53:11,2
44 | 39.984222,116.319402,2008-10-23 05:53:11,2
45 | """
46 |
47 |
48 | def _default_move_df():
49 | return MoveDataFrame(
50 | data=list_data,
51 | latitude=LATITUDE,
52 | longitude=LONGITUDE,
53 | datetime=DATETIME,
54 | traj_id=TRAJ_ID,
55 | type_=TYPE_DASK,
56 | )
57 |
58 |
59 | def _default_dask_df():
60 | df = DataFrame(
61 | data=[
62 | [39.984094, 116.319236, Timestamp('2008-10-23 05:53:05'), 1],
63 | [39.984198, 116.319322, Timestamp('2008-10-23 05:53:06'), 1],
64 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
65 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
66 | ],
67 | columns=['lat', 'lon', 'datetime', 'id'],
68 | index=[0, 1, 2, 3],
69 | )
70 | return from_pandas(df, npartitions=1)
71 |
72 |
73 | def _has_columns(data):
74 | cols = data.columns
75 | if LATITUDE in cols and LONGITUDE in cols and DATETIME in cols:
76 | return True
77 | return False
78 |
79 |
80 | def _validate_move_data_frame_data(data):
81 | try:
82 | if data.dtypes.lat != 'float64':
83 | return False
84 | if data.dtypes.lon != 'float64':
85 | return False
86 | if data.dtypes.datetime != 'datetime64[ns]':
87 | return False
88 | return True
89 | except AttributeError:
90 | return False
91 |
92 |
93 | def test_move_data_frame_from_list():
94 | move_df = _default_move_df()
95 | assert _has_columns(move_df)
96 | assert _validate_move_data_frame_data(move_df)
97 | assert isinstance(move_df, DaskMoveDataFrame)
98 |
99 |
100 | def test_move_data_frame_from_file(tmpdir):
101 | d = tmpdir.mkdir('prepossessing')
102 |
103 | file_default_columns = d.join('test_read_default.csv')
104 | file_default_columns.write(str_data_default)
105 | filename_default = os.path.join(
106 | file_default_columns.dirname, file_default_columns.basename
107 | )
108 |
109 | move_df = read_csv(filename_default, type_=TYPE_DASK)
110 | assert _has_columns(move_df)
111 | assert _validate_move_data_frame_data(move_df)
112 | assert isinstance(move_df, DaskMoveDataFrame)
113 |
114 | file_different_columns = d.join('test_read_different.csv')
115 | file_different_columns.write(str_data_different)
116 |     filename_different = os.path.join(
117 | file_different_columns.dirname, file_different_columns.basename
118 | )
119 |
120 | move_df = read_csv(
121 |         filename_different,
122 | latitude='latitude',
123 | longitude='longitude',
124 | datetime='time',
125 | traj_id='traj_id',
126 | type_=TYPE_DASK,
127 | )
128 | assert _has_columns(move_df)
129 | assert _validate_move_data_frame_data(move_df)
130 | assert isinstance(move_df, DaskMoveDataFrame)
131 |
132 | file_missing_columns = d.join('test_read_missing.csv')
133 | file_missing_columns.write(str_data_missing)
134 | filename_missing = os.path.join(
135 | file_missing_columns.dirname, file_missing_columns.basename
136 | )
137 |
138 | move_df = read_csv(
139 | filename_missing,
140 | names=[LATITUDE, LONGITUDE, DATETIME, TRAJ_ID],
141 | type_=TYPE_DASK,
142 | )
143 | assert _has_columns(move_df)
144 | assert _validate_move_data_frame_data(move_df)
145 | assert isinstance(move_df, DaskMoveDataFrame)
146 |
147 |
148 | def test_move_data_frame_from_dict():
149 | dict_data = {
150 | LATITUDE: [39.984198, 39.984224, 39.984094],
151 | LONGITUDE: [116.319402, 116.319322, 116.319402],
152 | DATETIME: [
153 | '2008-10-23 05:53:11',
154 | '2008-10-23 05:53:06',
155 | '2008-10-23 05:53:06',
156 | ],
157 | }
158 | move_df = MoveDataFrame(
159 | data=dict_data,
160 | latitude=LATITUDE,
161 | longitude=LONGITUDE,
162 | datetime=DATETIME,
163 | traj_id=TRAJ_ID,
164 | type_=TYPE_DASK,
165 | )
166 | assert _has_columns(move_df)
167 | assert _validate_move_data_frame_data(move_df)
168 | assert isinstance(move_df, DaskMoveDataFrame)
169 |
170 |
171 | def test_move_data_frame_from_data_frame():
172 | df = DataFrame(
173 | data=[
174 | [39.984094, 116.319236, Timestamp('2008-10-23 05:53:05'), 1],
175 | [39.984198, 116.319322, Timestamp('2008-10-23 05:53:06'), 1],
176 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
177 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
178 | ],
179 | columns=['lat', 'lon', 'datetime', 'id'],
180 | index=[0, 1, 2, 3],
181 | )
182 | move_df = MoveDataFrame(
183 | data=df,
184 | latitude=LATITUDE,
185 | longitude=LONGITUDE,
186 | datetime=DATETIME,
187 | type_=TYPE_DASK,
188 | )
189 | assert _has_columns(move_df)
190 | assert _validate_move_data_frame_data(move_df)
191 | assert isinstance(move_df, DaskMoveDataFrame)
192 |
193 |
194 | def test_attribute_error_from_data_frame():
195 | df = DataFrame(
196 | data=[
197 | [39.984094, 116.319236, Timestamp('2008-10-23 05:53:05'), 1],
198 | [39.984198, 116.319322, Timestamp('2008-10-23 05:53:06'), 1],
199 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
200 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
201 | ],
202 | columns=['laterr', 'lon', 'datetime', 'id'],
203 | index=[0, 1, 2, 3],
204 | )
205 | try:
206 | MoveDataFrame(
207 | data=df,
208 | latitude=LATITUDE,
209 | longitude=LONGITUDE,
210 | datetime=DATETIME,
211 | type_=TYPE_DASK,
212 | )
213 | raise AssertionError(
214 |             'AttributeError not raised by MoveDataFrame'
215 | )
216 | except AttributeError:
217 | pass
218 |
219 | df = DataFrame(
220 | data=[
221 | [39.984094, 116.319236, Timestamp('2008-10-23 05:53:05'), 1],
222 | [39.984198, 116.319322, Timestamp('2008-10-23 05:53:06'), 1],
223 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
224 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
225 | ],
226 | columns=['lat', 'lonerr', 'datetime', 'id'],
227 | index=[0, 1, 2, 3],
228 | )
229 | try:
230 | MoveDataFrame(
231 | data=df,
232 | latitude=LATITUDE,
233 | longitude=LONGITUDE,
234 | datetime=DATETIME,
235 | type_=TYPE_DASK,
236 | )
237 | raise AssertionError(
238 |             'AttributeError not raised by MoveDataFrame'
239 | )
240 | except AttributeError:
241 | pass
242 |
243 | df = DataFrame(
244 | data=[
245 | [39.984094, 116.319236, Timestamp('2008-10-23 05:53:05'), 1],
246 | [39.984198, 116.319322, Timestamp('2008-10-23 05:53:06'), 1],
247 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
248 | [39.984224, 116.319402, Timestamp('2008-10-23 05:53:11'), 2],
249 | ],
250 | columns=['lat', 'lon', 'datetimerr', 'id'],
251 | index=[0, 1, 2, 3],
252 | )
253 | try:
254 | MoveDataFrame(
255 | data=df,
256 | latitude=LATITUDE,
257 | longitude=LONGITUDE,
258 | datetime=DATETIME,
259 | type_=TYPE_DASK,
260 | )
261 | raise AssertionError(
262 |             'AttributeError not raised by MoveDataFrame'
263 | )
264 | except AttributeError:
265 | pass
266 |
267 |
268 | def test_convert_to():
269 | move_df = _default_move_df()
270 |
271 | assert move_df._type == TYPE_DASK
272 | assert isinstance(move_df, DaskMoveDataFrame)
273 | assert isinstance(move_df._data, DaskDataFrame)
274 |
275 | move_df_pandas = move_df.convert_to('pandas')
276 | assert move_df_pandas._type == TYPE_PANDAS
277 | assert isinstance(move_df_pandas, PandasMoveDataFrame)
278 |
279 | assert move_df._type == TYPE_DASK
280 | assert isinstance(move_df, DaskMoveDataFrame)
281 | assert isinstance(move_df._data, DaskDataFrame)
282 |
283 |
284 | def test_get_type():
285 | move_df = _default_move_df()
286 |
287 | assert move_df.get_type() == TYPE_DASK
288 |
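A compact, hypothetical sketch of the backend switch exercised by test_convert_to above:

    # Hypothetical sketch: build a dask-backed MoveDataFrame, convert to pandas.
    from pymove import MoveDataFrame
    from pymove.utils.constants import TYPE_DASK

    move_df = MoveDataFrame(
        data=[[39.984094, 116.319236, '2008-10-23 05:53:05', 1]],
        type_=TYPE_DASK,
    )
    pandas_df = move_df.convert_to('pandas')  # returns a new pandas-backed frame
    print(move_df.get_type(), pandas_df.get_type())  # dask pandas

As the test asserts, convert_to does not mutate the original frame: the dask-backed object keeps its type and its underlying dask DataFrame.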
--------------------------------------------------------------------------------
/pymove/tests/test_core_dataframe.py:
--------------------------------------------------------------------------------
1 | from dateutil.parser import ParserError
2 | from numpy.testing import assert_equal
3 | from pandas import DataFrame, Series
4 | from pandas.testing import assert_series_equal
5 |
6 | from pymove.core.dataframe import MoveDataFrame
7 |
8 |
9 | def test_has_columns():
10 | df = DataFrame(columns=['lat', 'lon', 'datetime'])
11 | assert MoveDataFrame.has_columns(df)
12 |
13 | df = DataFrame(columns=['lat', 'lon', 'time'])
14 | assert not MoveDataFrame.has_columns(df)
15 |
16 |
17 | def test_validate_columns():
18 | df = DataFrame(
19 | data=[[0, 0, '01-01-2020', 0]],
20 | columns=['lat', 'lon', 'datetime', 'id']
21 | )
22 | MoveDataFrame.validate_move_data_frame(df)
23 |
24 | expected = Series(
25 |         data=['float64', 'float64', ...
[... truncated in the source ...]
--------------------------------------------------------------------------------
/pymove/tests/test_utils_mem.py:
--------------------------------------------------------------------------------
[... truncated in the source; the file resumes mid-way, after an assertion ending in 'final_size' ...]
39 |
40 |
41 | def test_total_size():
42 |
43 | move_df = _default_move_df()
44 |
45 | expected_initial_size = 280
46 |
47 | assert abs(mem.total_size(move_df) - expected_initial_size) <= 20
48 |
49 |
50 | def test_begin_operation():
51 |
52 | process = psutil.Process(os.getpid())
53 |
54 | expected = {'process': process,
55 | 'init': process.memory_info()[0],
56 | 'start': time.time(),
57 | 'name': 'operation'}
58 |
59 | operation_info = mem.begin_operation('operation')
60 |
61 | assert_equal(list(operation_info.keys()), list(expected.keys()))
62 | assert_equal(operation_info['process'], expected['process'])
63 | assert_equal(int(operation_info['init']), int(expected['init']))
64 | assert_equal(int(operation_info['start']), int(expected['start']))
65 | assert_equal(operation_info['name'], expected['name'])
66 |
67 |
68 | def test_end_operation():
69 |
70 | operation = mem.begin_operation('operation')
71 |
72 | finish = operation['process'].memory_info()[0]
73 |
74 | last_operation_mem_usage = finish - operation['init']
75 |
76 | operation_info = mem.end_operation(operation)
77 |
78 | last_operation_time_duration = time.time() - operation['start']
79 |
80 | expected = {'name': 'operation',
81 |                 'time in seconds': last_operation_time_duration,
82 | 'memory': mem.sizeof_fmt(last_operation_mem_usage)}
83 |
84 | assert_equal(list(operation_info.keys()), list(expected.keys()))
85 | assert_equal(operation_info['name'], expected['name'])
86 | assert_equal(int(operation_info['time in seconds']),
87 | int(expected['time in seconds']))
88 | assert_equal(operation_info['memory'], expected['memory'])
89 |
90 |
91 | def test_sizeof_fmt():
92 |
93 | expected = '1.0 KiB'
94 |
95 | result = mem.sizeof_fmt(1024)
96 |
97 | assert_equal(expected, result)
98 |
99 | expected = '9.5 MiB'
100 |
101 | result = mem.sizeof_fmt(10000000)
102 |
103 | assert_equal(expected, result)
104 |
105 | expected = '9.3 GiB'
106 |
107 | result = mem.sizeof_fmt(10000000000)
108 |
109 | assert_equal(expected, result)
110 |
111 | expected = '10.0 b'
112 |
113 | result = mem.sizeof_fmt(10, 'b')
114 |
115 | assert_equal(expected, result)
116 |
117 |
118 | def test_top_mem_vars():
119 | move_df = _default_move_df()
120 | list_data_ = list_data
121 | local_vars = mem.top_mem_vars(locals())
122 |
123 | assert_array_equal(local_vars.shape, (2, 2))
124 | assert_array_equal(local_vars.columns, ['var', 'mem'])
125 | assert_array_equal(local_vars['var'].values, ['move_df', 'list_data_'])
126 |
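The begin/end pair tested above can bracket any block of code; a hypothetical sketch (the operation name and workload are made up for illustration):

    # Hypothetical sketch: time and measure the memory delta of a workload.
    from pymove.utils import mem

    op = mem.begin_operation('build_squares')
    squares = [i * i for i in range(1_000_000)]  # any workload
    stats = mem.end_operation(op)
    print(stats['name'], stats['time in seconds'], stats['memory'])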
--------------------------------------------------------------------------------
/pymove/tests/test_utils_visual.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import os
3 |
4 | from matplotlib.pyplot import cm
5 | from numpy.testing import assert_array_equal, assert_equal
6 |
7 | from pymove import MoveDataFrame
8 | from pymove.utils import visual
9 | from pymove.utils.constants import COLORS, DATETIME, LATITUDE, LONGITUDE, TRAJ_ID
10 |
11 | list_data = [
12 | [39.984094, 116.319236, '2008-10-23 05:53:05', 1],
13 | [39.984198, 116.319322, '2008-10-23 05:53:06', 1],
14 | [39.984224, 116.319402, '2008-10-23 05:53:11', 1],
15 | [39.984211, 116.319389, '2008-10-23 05:53:16', 1],
16 | [39.984217, 116.319422, '2008-10-23 05:53:21', 1],
17 | ]
18 |
19 |
20 | def _default_move_df():
21 | return MoveDataFrame(
22 | data=list_data,
23 | latitude=LATITUDE,
24 | longitude=LONGITUDE,
25 | datetime=DATETIME,
26 | traj_id=TRAJ_ID,
27 | )
28 |
29 |
30 | def test_generate_color():
31 | color = visual.generate_color()
32 | assert color in COLORS.values()
33 |
34 |
35 | def test_rgb():
36 |
37 | expected = (51, 51, 153)
38 |
39 | rgb = visual.rgb([0.6, 0.2, 0.2])
40 |
41 | assert_array_equal(rgb, expected)
42 |
43 |
44 | def test_hex_rgb():
45 |
46 | expected = '#333399'
47 |
48 | hex_rgb = visual.hex_rgb([0.6, 0.2, 0.2])
49 |
50 | assert_equal(hex_rgb, expected)
51 |
52 |
53 | def test_cmap_hex_color():
54 | cm_hex = visual.cmap_hex_color(cm.jet, 0)
55 | assert cm_hex == '#000080'
56 |
57 |
58 | def test_get_cmap():
59 | cmap = visual.get_cmap('tab20')
60 | assert cmap.N == 20
61 |
62 |
63 | def test_save_wkt(tmpdir):
64 |
65 | expected = ('id;linestring\n1;'
66 | 'LINESTRING(116.319236 39.984094,'
67 | '116.319322 39.984198,116.319402 '
68 | '39.984224,116.319389 39.984211,'
69 | '116.319422 39.984217)\n')
70 |
71 | move_df = _default_move_df()
72 |
73 | d = tmpdir.mkdir('utils')
74 |
75 | file_write_default = d.join('test_save_map.wkt')
76 | filename_write_default = os.path.join(
77 | file_write_default.dirname, file_write_default.basename
78 | )
79 |
80 | visual.save_wkt(move_data=move_df, filename=filename_write_default)
81 |
82 | file = codecs.open(file_write_default, 'r')
83 |
84 | map_info = file.read()
85 |
86 | assert_equal(map_info, expected)
87 |
--------------------------------------------------------------------------------
/pymove/tests/test_visualization_matplotlib.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import matplotlib.pyplot as plt
4 | from matplotlib.testing.compare import compare_images
5 | from shapely.geometry import LineString
6 |
7 | import pymove.visualization.matplotlib as mpl
8 | from pymove import MoveDataFrame
9 | from pymove.utils.constants import (
10 | DATE,
11 | DATETIME,
12 | DAY,
13 | HOUR,
14 | LATITUDE,
15 | LONGITUDE,
16 | PERIOD,
17 | TID,
18 | TRAJ_ID,
19 | )
20 |
21 | list_data = [
22 | [39.984094, 116.319236, '2008-10-23 05:53:05', 1],
23 | [39.984198, 116.319322, '2008-10-23 05:53:06', 1],
24 | [39.984224, 116.319402, '2008-10-23 05:53:11', 1],
25 | [39.984211, 116.319389, '2008-10-23 05:53:16', 1],
26 | [39.984217, 116.319422, '2008-10-23 05:53:21', 1],
27 | ]
28 |
29 |
30 | def _default_move_df():
31 | return MoveDataFrame(
32 | data=list_data,
33 | latitude=LATITUDE,
34 | longitude=LONGITUDE,
35 | datetime=DATETIME,
36 | traj_id=TRAJ_ID,
37 | )
38 |
39 |
40 | def test_show_object_id_by_date(tmpdir):
41 |
42 | move_df = _default_move_df()
43 |
44 | d = tmpdir.mkdir('visualization')
45 |
46 | file_write_default = d.join('shot_points_by_date.png')
47 | filename_write_default = os.path.join(
48 | file_write_default.dirname, file_write_default.basename
49 | )
50 |
51 | mpl.show_object_id_by_date(
52 | move_data=move_df,
53 | name=filename_write_default,
54 | save_fig=True
55 | )
56 |
57 | test_dir = os.path.abspath(os.path.dirname(__file__))
58 | data_dir = os.path.join(test_dir, 'baseline/shot_points_by_date.png')
59 |
60 | compare_images(
61 | data_dir,
62 | filename_write_default,
63 | 0.0001,
64 | in_decorator=False
65 | )
66 |
67 |
68 | def test_plot_trajectory_by_id(tmpdir):
69 | move_df = _default_move_df()
70 | move_df[TRAJ_ID] = ['1', '1', '2', '2', '2']
71 |
72 | d = tmpdir.mkdir('visualization')
73 |
74 | file_write_default = d.join('traj_id.png')
75 | filename_write_default = os.path.join(
76 | file_write_default.dirname, file_write_default.basename
77 | )
78 |
79 | mpl.plot_trajectory_by_id(move_df, '1', save_fig=True, name=filename_write_default)
80 |
81 | test_dir = os.path.abspath(os.path.dirname(__file__))
82 | data_dir = os.path.join(test_dir, 'baseline/traj_id.png')
83 |
84 | compare_images(
85 | data_dir,
86 | filename_write_default,
87 | 0.0001,
88 | in_decorator=False
89 | )
90 |
91 |
92 | def test_plot_all_features(tmpdir):
93 |
94 | move_df = _default_move_df()
95 |
96 | d = tmpdir.mkdir('visualization')
97 |
98 | file_write_default = d.join('features.png')
99 | filename_write_default = os.path.join(
100 | file_write_default.dirname, file_write_default.basename
101 | )
102 |
103 | mpl.plot_all_features(move_df, save_fig=True, name=filename_write_default)
104 |
105 | test_dir = os.path.abspath(os.path.dirname(__file__))
106 | data_dir = os.path.join(test_dir, 'baseline/features.png')
107 |
108 | compare_images(data_dir,
109 | filename_write_default,
110 | 0.0001,
111 | in_decorator=False)
112 |
113 | move_df['lat'] = move_df['lat'].astype('str')
114 | move_df['lon'] = move_df['lon'].astype('str')
115 |
116 | try:
117 | move_df.plot_all_features(name=filename_write_default)
118 | raise AssertionError(
119 |             'AttributeError not raised by MoveDataFrame'
120 | )
121 | except AttributeError:
122 | pass
123 |
124 |
125 | def test_plot_trajectories(tmpdir):
126 |
127 | move_df = _default_move_df()
128 |
129 | d = tmpdir.mkdir('visualization')
130 |
131 | file_write_default = d.join('trajectories.png')
132 | filename_write_default = os.path.join(
133 | file_write_default.dirname, file_write_default.basename
134 | )
135 |
136 | mpl.plot_trajectories(move_df, save_fig=True, name=filename_write_default)
137 |
138 | test_dir = os.path.abspath(os.path.dirname(__file__))
139 | data_dir = os.path.join(test_dir, 'baseline/trajectories.png')
140 |
141 | compare_images(data_dir,
142 | filename_write_default,
143 | 0.0001,
144 | in_decorator=False)
145 |
146 |
147 | def test_plot_coords(tmpdir):
148 | d = tmpdir.mkdir('visualization')
149 |
150 | file_write_default = d.join('plot_coords.png')
151 |
152 | filename_write_default = os.path.join(
153 | file_write_default.dirname, file_write_default.basename
154 | )
155 |
156 | coords = LineString([(1, 1), (1, 2), (2, 2), (2, 3)])
157 |
158 | _, ax = plt.subplots(figsize=(21, 9))
159 | mpl.plot_coords(ax, coords)
160 | plt.savefig(filename_write_default, dpi=100)
161 |
162 | test_dir = os.path.abspath(os.path.dirname(__file__))
163 | data_dir = os.path.join(test_dir, 'baseline/plot_coords.png')
164 |
165 | compare_images(
166 | data_dir, filename_write_default, 0.0001, in_decorator=False
167 | )
168 |
169 |
170 | def test_plot_bounds(tmpdir):
171 | d = tmpdir.mkdir('visualization')
172 |
173 | file_write_default = d.join('plot_bounds.png')
174 |
175 | filename_write_default = os.path.join(
176 | file_write_default.dirname, file_write_default.basename
177 | )
178 |
179 | bounds = LineString([(1, 1), (1, 2), (2, 2), (2, 3)])
180 |
181 | _, ax = plt.subplots(figsize=(21, 9))
182 | mpl.plot_bounds(ax, bounds)
183 | plt.savefig(filename_write_default, dpi=100)
184 |
185 | test_dir = os.path.abspath(os.path.dirname(__file__))
186 | data_dir = os.path.join(test_dir, 'baseline/plot_bounds.png')
187 |
188 | compare_images(
189 | data_dir, filename_write_default, 0.0001, in_decorator=False
190 | )
191 |
192 |
193 | def test_plot_line(tmpdir):
194 | d = tmpdir.mkdir('visualization')
195 |
196 | file_write_default = d.join('plot_line.png')
197 |
198 | filename_write_default = os.path.join(
199 | file_write_default.dirname, file_write_default.basename
200 | )
201 |
202 | line = LineString([(1, 1), (1, 2), (2, 2), (2, 3)])
203 |
204 | _, ax = plt.subplots(figsize=(21, 9))
205 | mpl.plot_line(ax, line)
206 | plt.savefig(filename_write_default, dpi=100)
207 |
208 | test_dir = os.path.abspath(os.path.dirname(__file__))
209 | data_dir = os.path.join(test_dir, 'baseline/plot_line.png')
210 |
211 | compare_images(
212 | data_dir, filename_write_default, 0.0001, in_decorator=False
213 | )
214 |
--------------------------------------------------------------------------------
/pymove/uncertainty/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains functions to mask trajectories.
3 |
4 | privacy,
5 | reducing
6 |
7 | """
8 |
--------------------------------------------------------------------------------
/pymove/uncertainty/privacy.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/uncertainty/reducing.py:
--------------------------------------------------------------------------------
1 | """Not implemented."""
2 |
--------------------------------------------------------------------------------
/pymove/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains utility functions.
3 |
4 | constants,
5 | conversions,
6 | data_augmentation,
7 | datetime,
8 | distances,
9 | geoutils,
10 | integration,
11 | log,
12 | math,
13 | mem,
14 | trajectories,
15 | visual
16 |
17 | """
18 |
--------------------------------------------------------------------------------
/pymove/utils/constants.py:
--------------------------------------------------------------------------------
1 | """PyMove constants."""
2 |
3 | LABEL = 'label'
4 | LATITUDE = 'lat'
5 | LONGITUDE = 'lon'
6 | DATETIME = 'datetime'
7 | TRAJ_ID = 'id'
8 | TRAJECTORY = 'trajectory'
9 | TID = 'tid'
10 | UID = 'user_id'
11 |
12 | START = 'start'
13 | DESTINY = 'destiny'
14 | LOCAL_LABEL = 'local_label'
15 | GRID_ID = 'grid_id'
16 |
17 | GEOHASH = 'geohash'
18 | BIN_GEOHASH = 'bin_geohash'
19 | LATITUDE_DECODE = 'lat_decode'
20 | LONGITUDE_DECODE = 'lon_decode'
21 |
22 | BASE_32 = ['0', '1', '2', '3', '4', '5',
23 | '6', '7', '8', '9', 'a', 'b',
24 | 'c', 'd', 'e', 'f', 'g', 'h',
25 | 'i', 'j', 'k', 'l', 'm', 'n',
26 | 'o', 'p', 'q', 'r', 's', 't',
27 | 'u', 'v', 'w', 'x', 'y', 'z']
28 |
29 | POI = 'poi'
30 | ID_POI = 'id_poi'
31 | DIST_POI = 'dist_poi'
32 | TYPE_POI = 'type_poi'
33 | NAME_POI = 'name_poi'
34 |
35 | EVENT_ID = 'event_id'
36 | EVENT_TYPE = 'event_type'
37 | DIST_EVENT = 'dist_event'
38 |
39 | CITY = 'city'
40 | HOME = 'home'
41 | ADDRESS = 'formatted_address'
42 | DIST_HOME = 'dist_home'
43 |
44 | GEOMETRY = 'geometry'
45 | VIOLATING = 'violating'
46 |
47 | HOUR = 'hour'
48 | HOUR_SIN = 'hour_sin'
49 | HOUR_COS = 'hour_cos'
50 | DATE = 'date'
51 | DAY = 'day'
52 | WEEK_END = 'weekend'
53 | WEEK_DAYS = [
54 | 'Monday',
55 | 'Tuesday',
56 | 'Wednesday',
57 | 'Thursday',
58 | 'Friday',
59 | 'Saturday',
60 | 'Sunday'
61 | ]
62 | PERIOD = 'period'
63 | DAY_PERIODS = [
64 | 'Early morning',
65 | 'Morning',
66 | 'Afternoon',
67 | 'Evening'
68 | ]
69 | TIME_SLOT = 'time_slot'
70 | TYPE_DASK = 'dask'
71 | TYPE_PANDAS = 'pandas'
72 |
73 | DIST_TO_PREV = 'dist_to_prev'
74 | DIST_TO_NEXT = 'dist_to_next'
75 | DIST_PREV_TO_NEXT = 'dist_prev_to_next'
76 | TIME_TO_PREV = 'time_to_prev'
77 | TIME_TO_NEXT = 'time_to_next'
78 | TIME_PREV_TO_NEXT = 'time_prev_to_next'
79 | SPEED_TO_PREV = 'speed_to_prev'
80 | SPEED_TO_NEXT = 'speed_to_next'
81 | SPEED_PREV_TO_NEXT = 'speed_prev_to_next'
82 | INDEX_GRID_LAT = 'index_grid_lat'
83 | INDEX_GRID_LON = 'index_grid_lon'
84 | INDEX_GRID = 'index_grid'
85 | TID_PART = 'tid_part'
86 | TID_SPEED = 'tid_speed'
87 | TID_TIME = 'tid_time'
88 | TID_DIST = 'tid_dist'
89 | SITUATION = 'situation'
90 | SEGMENT_STOP = 'segment_stop'
91 | STOP = 'stop'
92 | MOVE = 'move'
93 | POLYGON = 'polygon'
94 |
95 | LAT_MEAN = 'lat_mean'
96 | LON_MEAN = 'lon_mean'
97 |
98 | OUTLIER = 'outlier'
99 | OUT_BBOX = 'out_bbox'
100 | DEACTIVATED = 'deactivated_signal'
101 | JUMP = 'gps_jump'
102 | BLOCK = 'block_signal'
103 | SHORT = 'short_traj'
104 |
105 | TB = 'TB'
106 | GB = 'GB'
107 | MB = 'MB'
108 | KB = 'KB'
109 | B = 'bytes'
110 | COUNT = 'count'
111 |
112 | COLORS = {
113 | 0: '#000000', # black
114 | 1: '#808080', # gray
115 | 2: '#D3D3D3', # lightgray
116 | 3: '#FFFFFF', # white
117 | 4: '#800000', # red maroon
118 | 5: '#B22222', # red fire brick
119 | 6: '#DC143C', # red crimson
120 | 7: '#FF7F50', # coral
121 | 8: '#FF8C00', # dark orange
122 | 9: '#FFD700', # gold
123 | 10: '#FFFF00', # yellow
124 | 11: '#ADFF2F', # green yellow
125 | 12: '#9ACD32', # yellow green
126 | 13: '#6B8E23', # olive drab
127 | 14: '#808000', # olive
128 | 15: '#00FF00', # lime
129 | 16: '#008000', # green
130 | 17: '#3CB371', # medium sea green
131 | 18: '#00FF7F', # spring green
132 | 19: '#E0FFFF', # pale turquoise
133 | 20: '#00FFFF', # aqua/cyan
134 | 21: '#87CEFA', # light sky blue
135 | 22: '#00BFFF', # deep sky blue
136 | 23: '#1E90FF', # dodger blue
137 | 24: '#0000FF', # blue
138 | 25: '#6A5ACD', # slate blue
139 | 26: '#4B0082', # indigo
140 | 27: '#FF00FF', # fuchsia / magenta
141 | 28: '#EE82EE', # violet
142 | 29: '#8A2BE2', # blue violet
143 | 30: '#C71585', # medium violet red
144 | 31: '#FF1493', # deep pink
145 | 32: '#FFB6C1', # light pink
146 | 33: '#ffcc33', # sunglow
147 | 34: '#6699cc' # blue gray
148 | }
149 |
150 | TILES = [
151 | 'CartoDB positron',
152 | 'CartoDB dark_matter',
153 | 'Stamen Terrain',
154 | 'Stamen Toner',
155 | 'Stamen Watercolor',
156 | 'OpenStreetMap'
157 | ]
158 |
159 | USER_POINT = 'orange'
160 | LINE_COLOR = 'blue'
161 | POI_POINT = 'red'
162 | EVENT_POINT = 'purple'
163 |
164 |
165 | MEDP = 'MEDP'
166 | MEDT = 'MEDT'
167 |
168 | LOCAL_LABEL = 'local_label'
169 | PREV_LOCAL = 'prev_local'
170 | TID_STAT = 'tid_stat'
171 | THRESHOLD = 'threshold'
172 | MEAN = 'mean'
173 | STD = 'std'
174 | MIN = 'min'
175 | MAX = 'max'
176 | SUM = 'sum'
177 |
178 | POINT = 'Point'
179 |
180 | N_CLUSTER = 'n_cluster'
181 | EARTH_RADIUS = 6371  # mean Earth radius in kilometers
182 |
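These names double as the default column labels across the library; a hypothetical sketch of building a frame whose columns already match them:

    # Hypothetical sketch: constants double as pymove's default column names.
    from pymove import MoveDataFrame
    from pymove.utils.constants import DATETIME, LATITUDE, LONGITUDE, TRAJ_ID

    data = {
        LATITUDE: [39.984094],
        LONGITUDE: [116.319236],
        DATETIME: ['2008-10-23 05:53:05'],
        TRAJ_ID: [1],
    }
    move_df = MoveDataFrame(data)  # column names already match the defaults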
--------------------------------------------------------------------------------
/pymove/utils/log.py:
--------------------------------------------------------------------------------
1 | """
2 | Logging operations.
3 |
4 | progress_bar
5 | set_verbosity
6 | timer_decorator
7 |
8 | """
9 | from __future__ import annotations
10 |
11 | import logging
12 | import os
13 | import time
14 | from functools import wraps
15 | from typing import Callable, Iterable
16 |
17 | from IPython import get_ipython
18 | from IPython.display import display
19 | from ipywidgets import HTML, IntProgress, VBox
20 | from tqdm import tqdm as _tqdm
21 |
22 | from pymove.utils.datetime import deltatime_str
23 |
24 | LOG_LEVEL = os.getenv('PYMOVE_VERBOSE', 'INFO')
25 | logger = logging.getLogger('pymove')
26 | shell_handler = logging.StreamHandler()
27 | logger.setLevel(LOG_LEVEL)
28 | shell_handler.setLevel(LOG_LEVEL)
29 | logger.addHandler(shell_handler)
30 |
31 |
32 | def set_verbosity(level):
33 | """Change logging level."""
34 | logger.setLevel(level)
35 | shell_handler.setLevel(level)
36 |
37 |
38 | def timer_decorator(func: Callable) -> Callable:
39 | """A decorator that prints how long a function took to run."""
40 |
41 | @wraps(func)
42 | def wrapper(*args, **kwargs):
43 | t_start = time.time()
44 | result = func(*args, **kwargs)
45 | t_total = deltatime_str(time.time() - t_start)
46 | message = f'{func.__name__} took {t_total}'
47 | logger.debug('{}\n{}\n{}'.format('*' * len(message), message, '*' * len(message)))
48 | return result
49 |
50 | return wrapper
51 |
52 |
53 | def _log_progress(
54 | sequence: Iterable,
55 | desc: str | None = None,
56 | total: int | None = None,
57 | miniters: int | None = None
58 | ):
59 | """
60 | Make and display a progress bar.
61 |
62 | Parameters
63 | ----------
64 | sequence : iterable
65 | Represents a sequence of elements.
66 | desc : str, optional
67 | Represents the description of the operation, by default None.
68 | total : int, optional
69 | Represents the total/number elements in sequence, by default None.
70 | miniters : int, optional
71 | Represents the steps in which the bar will be updated, by default None.
72 |
73 | """
74 | if desc is None:
75 | desc = ''
76 | is_iterator = False
77 | if total is None:
78 | try:
79 | total = len(sequence) # type: ignore
80 | except TypeError:
81 | is_iterator = True
82 | if total is not None:
83 | if miniters is None:
84 | if total <= 200:
85 | miniters = 1
86 | else:
87 | miniters = int(total / 200)
88 | else:
89 | if miniters is None:
90 | miniters = 1
91 |
92 | if is_iterator:
93 | progress = IntProgress(min=0, max=1, value=1)
94 | progress.bar_style = 'info'
95 | else:
96 | progress = IntProgress(min=0, max=total, value=0)
97 | label = HTML()
98 | box = VBox(children=[label, progress])
99 | display(box)
100 |
101 | index = 0
102 | try:
103 | for index, record in enumerate(sequence, 1):
104 | if index == 1 or index % miniters == 0:
105 | if is_iterator:
106 | label.value = f'{desc}: {index} / ?'
107 | else:
108 | progress.value = index
109 | label.value = f'{desc}: {index} / {total}'
110 | yield record
111 | except Exception:
112 | progress.bar_style = 'danger'
113 | raise
114 | else:
115 | progress.bar_style = 'success'
116 | progress.value = index
117 | label.value = '{}: {}'.format(desc, str(index or '?'))
118 |
119 |
120 | try:
121 | if get_ipython().__class__.__name__ == 'ZMQInteractiveShell':
122 | _log_progress_bar = _log_progress
123 | else:
124 | raise NameError
125 | except NameError:
126 | _log_progress_bar = _tqdm
127 |
128 |
129 | def progress_bar(
130 | sequence: Iterable,
131 | desc: str | None = None,
132 | total: int | None = None,
133 | miniters: int | None = None
134 | ):
135 | """
136 | Make and display a progress bar.
137 |
138 | Parameters
139 | ----------
140 | sequence : iterable
141 | Represents a sequence of elements.
142 | desc : str, optional
143 | Represents the description of the operation, by default None.
144 | total : int, optional
145 | Represents the total/number elements in sequence, by default None.
146 | miniters : int, optional
147 | Represents the steps in which the bar will be updated, by default None.
148 |
149 |     Examples
150 |     --------
151 |     >>> from pymove.utils.log import progress_bar
152 |     >>> for i in progress_bar(range(1, 101), desc='Print 1 to 100'):
153 |     ...     print(i)
154 |     # A bar that shows the progress of the iterations
155 |     """
156 | if logger.level > logging.INFO:
157 | return sequence
158 | return _log_progress_bar(sequence, desc, total, miniters)
159 |
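A hypothetical sketch tying the helpers in this module together (the decorated function and its argument are made up for illustration):

    # Hypothetical sketch: control pymove's logging and progress reporting.
    import logging

    from pymove.utils.log import progress_bar, set_verbosity, timer_decorator

    set_verbosity(logging.DEBUG)  # timings logged; levels above INFO disable the bar

    @timer_decorator
    def total(n):
        return sum(progress_bar(range(n), desc='summing'))

    total(10_000)  # the duration is logged because the level is DEBUG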
--------------------------------------------------------------------------------
/pymove/utils/math.py:
--------------------------------------------------------------------------------
1 | """
2 | Math operations.
3 |
4 | is_number,
5 | std,
6 | avg_std,
7 | std_sample,
8 | avg_std_sample,
9 | arrays_avg,
10 | array_stats,
11 | interpolation
12 |
13 | """
14 | from __future__ import annotations
15 |
16 | import math
17 |
18 |
19 | def is_number(value: int | float | str):
20 | """
21 |     Returns whether the value is numerical.
22 |
23 | Parameters
24 | ----------
25 | value : int, float, str
26 |
27 | Returns
28 | -------
29 | boolean
30 | True if numerical, otherwise False
31 |
32 | Examples
33 | --------
34 | >>> from pymove.utils.math import is_number
35 | >>> a, b, c, d = 50, 22.5, '11.25', 'house'
36 |     >>> print(is_number(a), type(is_number(a)))
37 |     True <class 'bool'>
38 |     >>> print(is_number(b), type(is_number(b)))
39 |     True <class 'bool'>
40 |     >>> print(is_number(c), type(is_number(c)))
41 |     True <class 'bool'>
42 |     >>> print(is_number(d), type(is_number(d)))
43 |     False <class 'bool'>
44 | """
45 | try:
46 | float(value)
47 | except ValueError:
48 | return False
49 | return True
50 |
51 |
52 | def std(values_array: list[float]) -> float:
53 | """
54 | Compute standard deviation.
55 |
56 | Parameters
57 | ----------
58 | values_array : array like of numerical values.
59 | Represents the set of values to compute the operation.
60 |
61 | Returns
62 | -------
63 | float
64 | Represents the value of standard deviation.
65 |
66 | References
67 | ----------
68 | squaring with * is over 3 times as fast as with **2
69 | http://stackoverflow.com/questions/29046346/comparison-of-power-to-multiplication-in-python
70 |
71 | Example
72 | -------
73 | >>> from pymove.utils.math import std
74 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
75 | >>> print(std(list), type(std(list)))
76 |     1.7435595774162693 <class 'float'>
77 | """
78 | size = len(values_array)
79 | mean = sum(values_array) / size
80 | sum_sq = sum((i - mean) * (i - mean) for i in values_array)
81 |
82 | return math.sqrt(sum_sq / size)
83 |
84 |
85 | def avg_std(values_array: list[float]) -> tuple[float, float]:
86 | """
87 |     Compute the average and the standard deviation.
88 |
89 | Parameters
90 | ----------
91 | values_array : array like of numerical values.
92 | Represents the set of values to compute the operation.
93 |
94 | Returns
95 | -------
96 | float
97 | Represents the value of average.
98 | float
99 | Represents the value of standard deviation.
100 |
101 | Example
102 | -------
103 | >>> from pymove.utils.math import avg_std
104 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
105 | >>> print(avg_std(list), type(avg_std(list)))
106 |     (7.9, 1.7435595774162693) <class 'tuple'>
107 | """
108 | avg = sum(values_array) / len(values_array)
109 | return avg, std(values_array)
110 |
111 |
112 | def std_sample(values_array: list[float]) -> float:
113 | """
114 | Compute the standard deviation of sample.
115 |
116 | Parameters
117 | ----------
118 | values_array : array like of numerical values.
119 | Represents the set of values to compute the operation.
120 |
121 | Returns
122 | -------
123 | float
124 | Represents the value of standard deviation of sample.
125 |
126 | Example
127 | -------
128 | >>> from pymove.utils.math import std_sample
129 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
130 | >>> print(std_sample(list), type(std_sample(list)))
131 |     1.9493588689617927 <class 'float'>
132 | """
133 | size = len(values_array)
134 | return std(values_array) * math.sqrt(size / (size - 1))
135 |
136 |
137 | def avg_std_sample(values_array: list[float]) -> tuple[float, float]:
138 | """
139 |     Compute the average and the sample standard deviation.
140 |
141 | Parameters
142 | ----------
143 | values_array : array like of numerical values.
144 | Represents the set of values to compute the operation.
145 |
146 | Returns
147 | -------
148 | float
149 | Represents the value of average
150 | float
151 | Represents the standard deviation of sample.
152 |
153 | Example
154 | -------
155 | >>> from pymove.utils.math import avg_std_sample
156 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
157 | >>> print(avg_std_sample(list), type(avg_std_sample(list)))
158 |     (7.9, 1.9493588689617927) <class 'tuple'>
159 | """
160 | avg = sum(values_array) / len(values_array)
161 | return avg, std_sample(values_array)
162 |
163 |
164 | def arrays_avg(
165 | values_array: list[float], weights_array: list[float] | None = None
166 | ) -> float:
167 | """
168 | Computes the mean of the elements of the array.
169 |
170 | Parameters
171 | ----------
172 | values_array : array like of numerical values.
173 | Represents the set of values to compute the operation.
174 | weights_array : array, optional, default None.
175 | Used to calculate the weighted average, indicates the weight of
176 | each element in the array (values_array).
177 |
178 | Returns
179 | -------
180 | float
181 | The mean of the array elements.
182 |
183 | Examples
184 | --------
185 | >>> from pymove.utils.math import arrays_avg
186 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
187 | >>> weights = [0.1, 0.3, 0.15, 0.15, 0.3]
188 | >>> print('standard average', arrays_avg(list), type(arrays_avg(list)))
189 |     standard average 7.9 <class 'float'>
190 | >>> print(
191 | >>> 'weighted average: ',
192 | >>> arrays_avg(list, weights),
193 | >>> type(arrays_avg(list, weights))
194 | >>> )
195 |     weighted average:  1.6979999999999997 <class 'float'>
196 | """
197 | n = len(values_array)
198 |
199 | if weights_array is None:
200 | weights_array = [1] * n
201 | elif len(weights_array) != n:
202 | raise ValueError(
203 |             'values_array and weights_array must have the same number of rows'
204 | )
205 |
206 | result = 0.
207 |
208 | for i, j in zip(values_array, weights_array):
209 | result += i * j
210 |
211 |     return result / n  # note: divides by len(values_array), not sum(weights_array)
212 |
213 |
214 | def array_stats(values_array: list[float]) -> tuple[float, float, int]:
215 | """
216 | Computes statistics about the array.
217 |
218 | The sum of all the elements in the array, the sum of the square of
219 | each element and the number of elements of the array.
220 |
221 | Parameters
222 | ----------
223 | values_array : array like of numerical values.
224 | Represents the set of values to compute the operation.
225 |
226 | Returns
227 | -------
228 | float.
229 | The sum of all the elements in the array.
230 | float
231 | The sum of the square value of each element in the array.
232 | int.
233 | The number of elements in the array.
234 | Example
235 | -------
236 | >>> from pymove.utils.math import array_stats
237 | >>> list = [7.8, 9.7, 6.4, 5.6, 10]
238 | >>> print(array_stats(list), type(array_stats(list)))
239 |     (39.5, 327.25, 5) <class 'tuple'>
240 | """
241 | sum_ = 0.
242 | sum_sq = 0.
243 | n = 0
244 | for item in values_array:
245 | sum_ += item
246 | sum_sq += item * item
247 | n += 1
248 | return sum_, sum_sq, n
249 |
250 |
251 | def interpolation(x0: float, y0: float, x1: float, y1: float, x: float) -> float:
252 | """
253 | Performs interpolation.
254 |
255 | Parameters
256 | ----------
257 | x0 : float.
258 | The coordinate of the first point on the x axis.
259 | y0 : float.
260 | The coordinate of the first point on the y axis.
261 | x1 : float.
262 | The coordinate of the second point on the x axis.
263 | y1 : float.
264 | The coordinate of the second point on the y axis.
265 | x : float.
266 | A value in the interval (x0, x1).
267 |
268 | Returns
269 | -------
270 | float.
271 | Is the interpolated or extrapolated value.
272 |
273 | Example
274 | -------
275 | >>> from pymove.utils.math import interpolation
276 | >>> x0, y0, x1, y1, x = 2, 4, 3, 6, 3.5
277 | >>> print(interpolation(x0,y0,x1,y1,x), type(interpolation(x0,y0,x1,y1,x)))
278 |     7.0 <class 'float'>
279 | """
280 | return y0 + (y1 - y0) * ((x - x0) / (x1 - x0))
281 |
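A small sanity-check sketch for the estimators above, confirming the population/sample relationship the docstrings describe:

    # Sanity-check sketch: std_sample rescales std by sqrt(n / (n - 1)).
    import math

    from pymove.utils.math import avg_std, avg_std_sample, std, std_sample

    values = [7.8, 9.7, 6.4, 5.6, 10]
    n = len(values)
    assert math.isclose(std_sample(values), std(values) * math.sqrt(n / (n - 1)))
    assert math.isclose(avg_std(values)[0], sum(values) / n)
    assert math.isclose(avg_std_sample(values)[1], std_sample(values))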
--------------------------------------------------------------------------------
/pymove/visualization/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains functions to create visualizations of trajectories.
3 |
4 | folium,
5 | matplotlib
6 |
7 | """
8 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | bump2version
2 | coverage
3 | flake8
4 | flake8-bugbear
5 | flake8-docstrings
6 | mypy==0.812
7 | pep8-naming
8 | pre-commit
9 | pytest
10 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | branca
2 | dask[dataframe]
3 | folium>=0.10.1
4 | geohash2
5 | geojson
6 | holidays
7 | IPython
8 | ipywidgets
9 | joblib
10 | matplotlib
11 | mplleaflet
12 | networkx
13 | numpy
14 | pandas>=1.1.0,<1.4.0
15 | psutil
16 | python-dateutil
17 | pytz
18 | scikit-learn
19 | scipy
20 | shapely
21 | tqdm
22 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 3.0.1
3 | tag_name = {new_version}
4 | commit = True
5 |
6 | [bumpversion:file:pymove/__init__.py]
7 |
8 | [bumpversion:file:setup.py]
9 |
10 | [flake8]
11 | ignore = E203, E266, W402, W503, F401, F841, D401
12 | max-line-length = 90
13 | max-complexity = 15
14 | select = B,C,E,D,N,F,W
15 | exclude =
16 | .git,
17 | __pycache__,
18 | *.egg-info,
19 | .venv/*,
20 | venv/*,
21 | .env/*,
22 | env/*,
23 | *.egg.info,
24 | pymove/core/interface.py,
25 | pymove/tests/*
26 | docstring-convention = numpy
27 |
28 | [mypy]
29 | ignore_missing_imports = True
30 | no_warn_no_return = True
31 | show_error_codes = True
32 | files = pymove
33 |
34 | [isort]
35 | multi_line_output = 3
36 | include_trailing_comma = True
37 | line_length = 90
38 | known_third_party = IPython,branca,dask,dateutil,folium,geohash2,holidays,ipywidgets,joblib,matplotlib,networkx,numpy,pandas,psutil,scipy,setuptools,shapely,sklearn,tqdm
39 |
40 | [tool:pytest]
41 | addopts =
42 | --ignore notebooks
43 | --ignore build_tools
44 | --disable-pytest-warnings
45 | --capture sys
46 |
47 | [coverage:run]
48 | omit = .venv/*, venv/*, venv.bak/*, .env/*, env/*, env.bak/*, ENV/*, */tests/*
49 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Package setup."""
2 |
3 | from setuptools import find_packages, setup
4 |
5 | with open('README.md', 'r') as f:
6 | LONG_DESCRIPTION = f.read()
7 |
8 | with open('requirements.txt') as f:
9 | DEPENDENCIES = f.readlines()
10 |
11 | setup(
12 | name='pymove',
13 | version='3.0.1',
14 | author='Insight Data Science Lab',
15 | author_email='insightlab@dc.ufc.br',
16 | license='MIT',
17 | python_requires='>=3.6',
18 |     description='A Python library for processing and visualization '
19 | 'of trajectories and other spatial-temporal data',
20 | long_description=LONG_DESCRIPTION,
21 | long_description_content_type='text/markdown',
22 | url='https://github.com/InsightLab/PyMove',
23 | packages=find_packages(),
24 | classifiers=[
25 | 'Development Status :: 5 - Production/Stable',
26 | 'Programming Language :: Python :: 3',
27 | 'License :: OSI Approved :: MIT License',
28 | 'Operating System :: OS Independent',
29 | 'Typing :: Typed'
30 | ],
31 | install_requires=DEPENDENCIES,
32 | include_package_data=True
33 | )
34 |
--------------------------------------------------------------------------------