├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── __init__.py ├── gd_core ├── __init__.py ├── geodetector.py ├── interaction_fig │ └── interaction.bmp └── optimal_stratification.py ├── geographical_detector.py ├── geographical_detector_algorithm.py ├── geographical_detector_provider.py ├── help ├── Makefile ├── make.bat └── source │ ├── conf.py │ └── index.rst ├── i18n └── af.ts ├── icon.png ├── image ├── Q_GD GUI.png └── Snipaste_2022-04-01_12-18-20.png ├── metadata.txt ├── pb_tool.cfg ├── plugin_upload.py ├── pylintrc ├── scripts ├── compile-strings.sh ├── run-env-linux.sh └── update-strings.sh ├── test ├── __init__.py ├── qgis_interface.py ├── tenbytenraster.asc ├── tenbytenraster.asc.aux.xml ├── tenbytenraster.keywords ├── tenbytenraster.lic ├── tenbytenraster.prj ├── tenbytenraster.qml ├── test_init.py ├── test_qgis_environment.py ├── test_translations.py ├── test_write.py └── utilities.py └── ui └── __init__.py /.gitignore: -------------------------------------------------------------------------------- 1 | # ignore compiled python files 2 | *pyc 3 | plugin_template/*pyc 4 | .idea 5 | .coverage 6 | .noseids 7 | nohup.out 8 | resources.py 9 | 10 | # ignore the help build dir 11 | help/build 12 | 13 | # ignore the data dir 14 | data 15 | pygd 16 | gd_core/__pycache__ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc. 5 | 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. 
By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Library General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. 
If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. 
You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 
113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. 
You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 
165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. 
If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. 
If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 
292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License 307 | along with this program; if not, write to the Free Software 308 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 309 | 310 | 311 | Also add information on how to contact you by electronic and paper mail. 312 | 313 | If the program is interactive, make it output a short notice like this 314 | when it starts in an interactive mode: 315 | 316 | Gnomovision version 69, Copyright (C) year name of author 317 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 318 | This is free software, and you are welcome to redistribute it 319 | under certain conditions; type `show c' for details. 320 | 321 | The hypothetical commands `show w' and `show c' should show the appropriate 322 | parts of the General Public License. Of course, the commands you use may 323 | be called something other than `show w' and `show c'; they could even be 324 | mouse-clicks or menu items--whatever suits your program. 325 | 326 | You should also get your employer (if you work as a programmer) or your 327 | school, if any, to sign a "copyright disclaimer" for the program, if 328 | necessary. Here is a sample; alter the names: 329 | 330 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 331 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 
332 | 333 | , 1 April 1989 334 | Ty Coon, President of Vice 335 | 336 | This General Public License does not permit incorporating your program into 337 | proprietary programs. If your program is a subroutine library, you may 338 | consider it more useful to permit linking proprietary applications with the 339 | library. If this is what you want to do, use the GNU Library General 340 | Public License instead of this License. 341 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | #/*************************************************************************** 2 | # Geo_detector 3 | # 4 | # This plugin adds an algorithm to measure the spatial stratified heter 5 | # ------------------- 6 | # begin : 2021-12-21 7 | # git sha : $Format:%H$ 8 | # copyright : (C) 2021 by Guojg 9 | # email : guojg@lreis.ac.cn 10 | # ***************************************************************************/ 11 | # 12 | #/*************************************************************************** 13 | # * * 14 | # * This program is free software; you can redistribute it and/or modify * 15 | # * it under the terms of the GNU General Public License as published by * 16 | # * the Free Software Foundation; either version 2 of the License, or * 17 | # * (at your option) any later version. * 18 | # * * 19 | # ***************************************************************************/ 20 | 21 | ################################################# 22 | # Edit the following to match your sources lists 23 | ################################################# 24 | 25 | 26 | #Add iso code for any locales you want to support here (space separated) 27 | # default is no locales 28 | # LOCALES = af 29 | LOCALES = 30 | 31 | # If locales are enabled, set the name of the lrelease binary on your system. 
If 32 | # you have trouble compiling the translations, you may have to specify the full path to 33 | # lrelease 34 | #LRELEASE = lrelease 35 | #LRELEASE = lrelease-qt4 36 | 37 | 38 | # translation 39 | SOURCES = \ 40 | __init__.py \ 41 | geographical_detector.py 42 | 43 | PLUGINNAME = geographical_detector 44 | 45 | PY_FILES = \ 46 | __init__.py \ 47 | geographical_detector.py 48 | 49 | UI_FILES = 50 | 51 | EXTRAS = metadata.txt 52 | 53 | EXTRA_DIRS = 54 | 55 | COMPILED_RESOURCE_FILES = 56 | 57 | PEP8EXCLUDE=pydev,resources.py,conf.py,third_party,ui 58 | 59 | # QGISDIR points to the location where your plugin should be installed. 60 | # This varies by platform, relative to your HOME directory: 61 | # * Linux: 62 | # .local/share/QGIS/QGIS3/profiles/default/python/plugins/ 63 | # * Mac OS X: 64 | # Library/Application Support/QGIS/QGIS3/profiles/default/python/plugins 65 | # * Windows: 66 | # AppData\Roaming\QGIS\QGIS3\profiles\default\python\plugins' 67 | 68 | QGISDIR=C:\Users\guojg\AppData/Roaming/QGIS/QGIS3/profiles/default/python/plugins 69 | 70 | ################################################# 71 | # Normally you would not need to edit below here 72 | ################################################# 73 | 74 | HELP = help/build/html 75 | 76 | PLUGIN_UPLOAD = $(c)/plugin_upload.py 77 | 78 | RESOURCE_SRC=$(shell grep '^ *@@g;s/.*>//g' | tr '\n' ' ') 79 | 80 | .PHONY: default 81 | default: 82 | @echo While you can use make to build and deploy your plugin, pb_tool 83 | @echo is a much better solution. 84 | @echo A Python script, pb_tool provides platform independent management of 85 | @echo your plugins and runs anywhere. 86 | @echo You can install pb_tool using: pip install pb_tool 87 | @echo See https://g-sherman.github.io/plugin_build_tool/ for info. 
88 | 89 | compile: $(COMPILED_RESOURCE_FILES) 90 | 91 | %.py : %.qrc $(RESOURCES_SRC) 92 | pyrcc5 -o $*.py $< 93 | 94 | %.qm : %.ts 95 | $(LRELEASE) $< 96 | 97 | test: compile transcompile 98 | @echo 99 | @echo "----------------------" 100 | @echo "Regression Test Suite" 101 | @echo "----------------------" 102 | 103 | @# Preceding dash means that make will continue in case of errors 104 | @-export PYTHONPATH=`pwd`:$(PYTHONPATH); \ 105 | export QGIS_DEBUG=0; \ 106 | export QGIS_LOG_FILE=/dev/null; \ 107 | nosetests -v --with-id --with-coverage --cover-package=. \ 108 | 3>&1 1>&2 2>&3 3>&- || true 109 | @echo "----------------------" 110 | @echo "If you get a 'no module named qgis.core error, try sourcing" 111 | @echo "the helper script we have provided first then run make test." 112 | @echo "e.g. source run-env-linux.sh ; make test" 113 | @echo "----------------------" 114 | 115 | deploy: compile doc transcompile 116 | @echo 117 | @echo "------------------------------------------" 118 | @echo "Deploying plugin to your .qgis2 directory." 
119 | @echo "------------------------------------------" 120 | # The deploy target only works on unix like operating system where 121 | # the Python plugin directory is located at: 122 | # $HOME/$(QGISDIR)/python/plugins 123 | mkdir -p $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 124 | cp -vf $(PY_FILES) $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 125 | cp -vf $(UI_FILES) $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 126 | cp -vf $(COMPILED_RESOURCE_FILES) $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 127 | cp -vf $(EXTRAS) $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 128 | cp -vfr i18n $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 129 | cp -vfr $(HELP) $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME)/help 130 | # Copy extra directories if any 131 | (foreach EXTRA_DIR,(EXTRA_DIRS), cp -R (EXTRA_DIR) (HOME)/(QGISDIR)/python/plugins/(PLUGINNAME)/;) 132 | 133 | 134 | # The dclean target removes compiled python files from plugin directory 135 | # also deletes any .git entry 136 | dclean: 137 | @echo 138 | @echo "-----------------------------------" 139 | @echo "Removing any compiled python files." 140 | @echo "-----------------------------------" 141 | find $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) -iname "*.pyc" -delete 142 | find $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) -iname ".git" -prune -exec rm -Rf {} \; 143 | 144 | 145 | derase: 146 | @echo 147 | @echo "-------------------------" 148 | @echo "Removing deployed plugin." 149 | @echo "-------------------------" 150 | rm -Rf $(HOME)/$(QGISDIR)/python/plugins/$(PLUGINNAME) 151 | 152 | zip: deploy dclean 153 | @echo 154 | @echo "---------------------------" 155 | @echo "Creating plugin zip bundle." 156 | @echo "---------------------------" 157 | # The zip target deploys the plugin and creates a zip file with the deployed 158 | # content. 
You can then upload the zip file on http://plugins.qgis.org 159 | rm -f $(PLUGINNAME).zip 160 | cd $(HOME)/$(QGISDIR)/python/plugins; zip -9r $(CURDIR)/$(PLUGINNAME).zip $(PLUGINNAME) 161 | 162 | package: compile 163 | # Create a zip package of the plugin named $(PLUGINNAME).zip. 164 | # This requires use of git (your plugin development directory must be a 165 | # git repository). 166 | # To use, pass a valid commit or tag as follows: 167 | # make package VERSION=Version_0.3.2 168 | @echo 169 | @echo "------------------------------------" 170 | @echo "Exporting plugin to zip package. " 171 | @echo "------------------------------------" 172 | rm -f $(PLUGINNAME).zip 173 | git archive --prefix=$(PLUGINNAME)/ -o $(PLUGINNAME).zip $(VERSION) 174 | echo "Created package: $(PLUGINNAME).zip" 175 | 176 | upload: zip 177 | @echo 178 | @echo "-------------------------------------" 179 | @echo "Uploading plugin to QGIS Plugin repo." 180 | @echo "-------------------------------------" 181 | $(PLUGIN_UPLOAD) $(PLUGINNAME).zip 182 | 183 | transup: 184 | @echo 185 | @echo "------------------------------------------------" 186 | @echo "Updating translation files with any new strings." 187 | @echo "------------------------------------------------" 188 | @chmod +x scripts/update-strings.sh 189 | @scripts/update-strings.sh $(LOCALES) 190 | 191 | transcompile: 192 | @echo 193 | @echo "----------------------------------------" 194 | @echo "Compiled translation files to .qm files." 195 | @echo "----------------------------------------" 196 | @chmod +x scripts/compile-strings.sh 197 | @scripts/compile-strings.sh $(LRELEASE) $(LOCALES) 198 | 199 | transclean: 200 | @echo 201 | @echo "------------------------------------" 202 | @echo "Removing compiled translation files." 
203 | @echo "------------------------------------" 204 | rm -f i18n/*.qm 205 | 206 | clean: 207 | @echo 208 | @echo "------------------------------------" 209 | @echo "Removing uic and rcc generated files" 210 | @echo "------------------------------------" 211 | rm $(COMPILED_UI_FILES) $(COMPILED_RESOURCE_FILES) 212 | 213 | doc: 214 | @echo 215 | @echo "------------------------------------" 216 | @echo "Building documentation using sphinx." 217 | @echo "------------------------------------" 218 | cd help; make html 219 | 220 | pylint: 221 | @echo 222 | @echo "-----------------" 223 | @echo "Pylint violations" 224 | @echo "-----------------" 225 | @pylint --reports=n --rcfile=pylintrc . || true 226 | @echo 227 | @echo "----------------------" 228 | @echo "If you get a 'no module named qgis.core' error, try sourcing" 229 | @echo "the helper script we have provided first then run make pylint." 230 | @echo "e.g. source run-env-linux.sh ; make pylint" 231 | @echo "----------------------" 232 | 233 | 234 | # Run pep8 style checking 235 | #http://pypi.python.org/pypi/pep8 236 | pep8: 237 | @echo 238 | @echo "-----------" 239 | @echo "PEP8 issues" 240 | @echo "-----------" 241 | @pep8 --repeat --ignore=E203,E121,E122,E123,E124,E125,E126,E127,E128 --exclude $(PEP8EXCLUDE) . || true 242 | @echo "-----------" 243 | @echo "Ignored in PEP8 check:" 244 | @echo $(PEP8EXCLUDE) 245 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # QGIS-Geographical-detector 2 | 3 | QGIS geographical detector plugin: Tool for measuring spatial stratified heterogeneity and spatial associations of geographical attributes. 
4 | 5 | 6 | ## Geographical detector 7 | Geographical detector, or GeoDetector, is a statistical tool to measure Spatial Stratified Heterogeneity (SSH) and to make attribution for/by SSH; 8 | (1) measure and find SSH among data; 9 | (2) test the coupling between two variables Y and X and 10 | (3) investigate interaction between two explanatory variables X1 and X2 to a response variable Y. 11 | ## Installation 12 | 13 | ![Menus and procedure for one-time activation of the Geographical detector plugin within QGIS](image/Snipaste_2022-04-01_12-18-20.png) 14 | 15 | ## Parameters 16 | ![Q_GD GUI](https://github.com/gsnrguo/QGIS-Geographical-detector/blob/main/image/Q_GD%20GUI.png) 17 | ### Basic parameters 18 | 1. Input layer: vector layer 19 | 2. Study variable : field name of study variable 20 | 3. Field(s) with categories [optional]: field(s) of categories explanatory variables 21 | 4. Field(s) with numeric [optional]: field(s) of numeric explanatory variables 22 | 23 | *Parameters 3 and 4 cannot both be empty, if parameter 4 is not empty, then a stratification procedure is required.* 24 | 25 | ### Advanced parameters (stratification parameters) 26 | 27 | 1. Maximum number of groups [optional]: Maximum number of strata, if the Maximum number is equal to 28 | 2. Minimum number of groups [optional]: 29 | 3. Field for equality constraint [optional]: equality means the populations/geographical areas in the new strata are of sufficient size and as similar as possible 30 | 4. Minimum ratio for equality measures [optional]: restrict the minimum population/geographical area in each strata 31 | 5. The number of samples for stratification [optional]: if the number of vector is too large, sampling is 32 | 6. Minimum threshold for q-value increase [optional]: will be deprecated in the new version 33 | 7. Cross-validation number: default is 10 34 | 8. 
Cross-validation random stata [optional]: random seed, if a random seed is given so that the results of the running are reproducible 35 | 9. Times of repeating cross-validation [optional] : Improving the stability of stratification results. 36 | 37 | ### Author 38 | 39 | Jiangang Guo (); Jinfeng Wang () 40 | 41 | ## References 42 | 1. Wang JF, Li XH, Christakos G, Liao YL, Zhang T, Gu X & Zheng XY. 2010. Geographical detectors-based health risk assessment and its application in the neural tube defects study of the Heshun region, China. International Journal of Geographical Information Science 24(1): 107-127. 43 | 2. Wang JF, Zhang TL, Fu BJ. 2016. A measure of spatial stratified heterogeneity. Ecological Indicators 67: 250-256. 44 | 3. GeoDetector Website (http://www.geodetector.cn/) 45 | 46 | 47 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | /*************************************************************************** 4 | Geo_detector 5 | A QGIS plugin 6 | This plugin adds an algorithm to measure the spatial stratified heter 7 | Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ 8 | ------------------- 9 | begin : 2021-12-21 10 | copyright : (C) 2021 by Guojg 11 | email : guojg@lreis.ac.cn 12 | ***************************************************************************/ 13 | 14 | /*************************************************************************** 15 | * * 16 | * This program is free software; you can redistribute it and/or modify * 17 | * it under the terms of the GNU General Public License as published by * 18 | * the Free Software Foundation; either version 2 of the License, or * 19 | * (at your option) any later version. 
* 20 | * * 21 | ***************************************************************************/ 22 | This script initializes the plugin, making it known to QGIS. 23 | """ 24 | 25 | __author__ = 'Guojg' 26 | __date__ = '2021-12-21' 27 | __copyright__ = '(C) 2021 by Guojg' 28 | 29 | try: 30 | import gd_corecore 31 | except Exception: 32 | import os 33 | import site 34 | 35 | site.addsitedir(os.path.abspath(os.path.dirname(__file__))) 36 | 37 | 38 | # noinspection PyPep8Naming 39 | def classFactory(iface): # pylint: disable=invalid-name 40 | """Load Geo_detector class from file Geo_detector. 41 | 42 | :param iface: A QGIS interface instance. 43 | :type iface: QgsInterface 44 | """ 45 | # 46 | from .geographical_detector import Geo_detectorPlugin 47 | return Geo_detectorPlugin(iface) 48 | -------------------------------------------------------------------------------- /gd_core/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2021/12/21 21:47 4 | # @Author : gjg 5 | # @Site : 6 | # @File : __init__.py.py 7 | # @Software: PyCharm 8 | -------------------------------------------------------------------------------- /gd_core/geodetector.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2021/4/6 21:09 4 | # @Author : gjg 5 | # @Site : 6 | # @File : geodetector.py 7 | # @Software: PyCharm 8 | 9 | import os 10 | import numpy as np 11 | import pandas as pd 12 | from itertools import combinations 13 | import xlwt 14 | from xlwt import easyxf 15 | from scipy.stats import f, levene, ncf, ttest_ind 16 | from scipy.special import ncfdtrinc 17 | 18 | gd_path = os.path.dirname(__file__) 19 | 20 | 21 | class GeoDetector(object): 22 | def __init__(self, data, x_names, y_name, save_path=None, alpha=0.05): 23 | """ 24 | 25 | Args: 26 | data: 27 | x_names: 28 | y_name: 29 | 
save_path: 30 | alpha: 31 | """ 32 | # 33 | self.file_name = None 34 | self.x_names = x_names 35 | self.y_name = y_name 36 | self.alpha = alpha 37 | if save_path: 38 | self.save_path = save_path 39 | else: 40 | self.save_path = os.getcwd() 41 | for x_name in x_names: 42 | if data[x_name].values[0] is not str: 43 | data.loc[:, x_name] = data[x_name].astype(str).to_numpy() 44 | self.data = data 45 | self.n = len(data) 46 | # 47 | self.var_pop = np.var(self.data[self.y_name], ddof=0) 48 | self.var_sam = np.var(self.data[self.y_name], ddof=1) 49 | self.sst = self.var_pop * self.n 50 | self.len_x = len(self.x_names) 51 | self.factor_detector = self._factor_detector() 52 | 53 | pd.set_option('mode.chained_assignment', None) 54 | 55 | def _factor_detector(self): 56 | """ 57 | Compares the accumulated dispersion variance of each sub-group with the dispersion variance of the all 58 | """ 59 | len_x = len(self.x_names) 60 | factor_result = pd.DataFrame( 61 | {"q": [0] * len_x, "p-value": [0] * len_x, "num_strata": [0] * len_x, "ci_90": None, "ci_95": None, "ci_99": None}) 62 | factor_result.index = self.x_names 63 | for x_name in self.x_names: 64 | data_i = self.data[[x_name, self.y_name]] 65 | mean_h = data_i.groupby(x_name)[self.y_name].mean() 66 | var_h = data_i.groupby(x_name)[self.y_name].agg(np.var, ddof=0) 67 | n_h = data_i.groupby(x_name)[self.y_name].count() 68 | q_i, sig_i, ci_90, ci_95, ci_99 = self._q_calculate(mean_h, var_h, n_h) 69 | factor_result.loc[x_name, :] = [q_i, sig_i, len(n_h), ci_90, ci_95, ci_99] 70 | 71 | return factor_result 72 | 73 | @property 74 | def risk_detector(self): 75 | """ 76 | Compares the difference of average values between sub-groups 77 | """ 78 | risk_result = dict() 79 | for x_name in self.x_names: 80 | risk_name = self.data.groupby(x_name)[self.y_name].mean() 81 | strata = np.sort(self.data[x_name].unique()) 82 | t_test = np.empty((len(strata), len(strata))) 83 | t_test.fill(np.nan) 84 | t_test_strata = pd.DataFrame(t_test, 
index=strata, columns=strata) 85 | for i in range(len(strata) - 1): 86 | for j in range(i + 1, len(strata)): 87 | y_i = self.data.loc[self.data[x_name] == strata[i], [self.y_name]] 88 | y_j = self.data.loc[self.data[x_name] == strata[j], [self.y_name]] 89 | y_i = np.array(y_i).reshape(-1) 90 | y_j = np.array(y_j).reshape(-1) 91 | # hypothesis testing of variance homogeneity 92 | levene_result = levene(y_i, y_j) 93 | if levene_result.pvalue < self.alpha: 94 | # variance non-homogeneous 95 | ttest_result = ttest_ind(y_i, y_j, equal_var=False) 96 | else: 97 | ttest_result = ttest_ind(y_i, y_j) 98 | 99 | t_test_strata.iloc[j, i] = ttest_result.pvalue <= self.alpha 100 | 101 | risk_x_name = dict(xname=x_name, risk=risk_name, ttest_stra=t_test_strata) 102 | risk_result[x_name] = risk_x_name 103 | return risk_result 104 | 105 | # @property 106 | 107 | @property 108 | def interaction_detector(self): 109 | """ 110 | Compares the sum of the disease contribution of two individual attributes vs. the contribution of the two 111 | attributes when taken together. 
112 | """ 113 | 114 | factor_detector = self.factor_detector 115 | q = factor_detector['q'].to_numpy() 116 | q_sig = factor_detector['p-value'].to_numpy() 117 | # interaction 118 | index_com = combinations(range(len(q)), 2) 119 | index_com_value = [q[list(i)] for i in index_com] 120 | fuc_value = np.array([[i.min(), i.max(), i.sum()] for i in index_com_value]) 121 | df_index = combinations(self.x_names, 2) 122 | df_index = [i for i in df_index] 123 | df_interaction = pd.DataFrame(index=df_index, columns=['inter_value', 'inter_action']) 124 | 125 | interaction_result_q = np.diag(q) 126 | interaction_result_sig = np.diag(q_sig) 127 | num_index = 0 128 | for i in range(self.len_x - 1): 129 | for j in range(i + 1, self.len_x): 130 | temp_data = pd.DataFrame(self.data[self.y_name]) 131 | temp_data['inter_name'] = self.data[self.x_names[i]] + self.data[self.x_names[j]] 132 | mean_h = temp_data.groupby("inter_name")[self.y_name].mean() 133 | var_h = temp_data.groupby("inter_name")[self.y_name].agg(np.var, ddof=0) 134 | n_h = temp_data.groupby("inter_name")[self.y_name].count() 135 | q_i, sig_i, _, _, _ = self._q_calculate(mean_h, var_h, n_h) 136 | interaction_result_q[j, i] = q_i 137 | interaction_result_sig[j, i] = sig_i 138 | # interaction 139 | df_interaction.iloc[num_index, 0] = q_i 140 | num_index += 1 141 | 142 | q_value = pd.DataFrame(data=interaction_result_q, index=self.x_names, columns=self.x_names) 143 | sig_value = pd.DataFrame(data=interaction_result_sig, index=self.x_names, columns=self.x_names) 144 | # interaction result 145 | inter_action = ['Weaken_nonlinear', 'Weaken_uni-', 'Enhance_bi-', 'Independent', 'Enhance_nonlinear'] 146 | # temp_bool 147 | df_interaction.loc[:, 'inter_action'] = 'Enhance_bi-' 148 | independent_bool = fuc_value[:, 2] == df_interaction['inter_value'].to_numpy() 149 | df_interaction.loc[independent_bool, 'inter_action'] = 'Independent' 150 | enhance_non_bool = (fuc_value[:, 2] < df_interaction['inter_value'].to_numpy()) 151 | 
df_interaction.loc[enhance_non_bool, 'inter_action'] = 'Enhance_nonlinear' 152 | 153 | interaction_result = dict(q=q_value, sig=sig_value, interaction=df_interaction) 154 | return interaction_result 155 | 156 | @property 157 | def ecological_detector(self): 158 | """ 159 | Compares the variance calculated from each sub-groups divided according to one determinant with that divided 160 | according to another determinant 161 | 162 | """ 163 | eco_array = np.empty((self.len_x, self.len_x)) 164 | eco_array.fill(np.nan) 165 | ecological_result = pd.DataFrame(eco_array, columns=self.x_names, index=self.x_names) 166 | 167 | for i in range(self.len_x - 1): 168 | x_name_i = self.x_names[i] 169 | ssw_i = self.stat_stratum(x_name_i) 170 | for j in range(i + 1, self.len_x): 171 | x_name_j = self.x_names[j] 172 | ssw_j = self.stat_stratum(x_name_j) 173 | ecological_result.iloc[j, i] = ssw_i / ssw_j > f.ppf( 174 | 1 - self.alpha, dfn=len(self.data), dfd=len(self.data) 175 | ) 176 | return ecological_result 177 | 178 | def _q_calculate(self, mean_h, var_h, n_h): 179 | len_stratum = var_h.size 180 | sse = np.dot(var_h, n_h) 181 | q_i = 1 - sse / self.sst 182 | dfn = len_stratum - 1 183 | dfd = self.n - len_stratum 184 | # sig 185 | fv = dfd * q_i / (dfn * (1 - q_i)) 186 | nc_para = (pow(mean_h, 2).sum() - pow(np.dot(np.sqrt(n_h), mean_h), 2) / self.n) / self.var_sam 187 | sig_i = 1 - ncf.cdf(fv, dfn, dfd, nc_para) 188 | # confidence interval 189 | ncp_inter_90 = ncfdtrinc(dfn, dfd, [0.95, 0.05], fv) 190 | q_inter_90 = ncp_inter_90 / (ncp_inter_90 + dfn + dfd + 1) 191 | ncp_inter_95 = ncfdtrinc(dfn, dfd, [0.975, 0.025], fv) 192 | q_inter_95 = ncp_inter_95 / (ncp_inter_95 + dfn + dfd + 1) 193 | ncp_inter_99 = ncfdtrinc(dfn, dfd, [0.995, 0.005], fv) 194 | q_inter_99 = ncp_inter_99 / (ncp_inter_99 + dfn + dfd + 1) 195 | # format the interval 196 | ci_90 = ('%.4f' % q_inter_90[0]) + "-" + ('%.4f' % q_inter_90[1]) 197 | ci_95 = ('%.4f' % q_inter_95[0]) + "-" + ('%.4f' % q_inter_95[1]) 
198 | ci_99 = ('%.4f' % q_inter_99[0]) + "-" + ('%.4f' % q_inter_99[1]) 199 | return q_i, sig_i, ci_90, ci_95, ci_99 200 | 201 | def stat_stratum(self, x_name): 202 | var_st = self.data.groupby(x_name)[self.y_name].agg( 203 | np.var, ddof=0 204 | ) 205 | n_st = self.data.groupby(x_name)[self.y_name].count() 206 | ssw_st = (var_st * n_st).sum() 207 | 208 | return ssw_st 209 | 210 | def save_to_xls(self, save_file): 211 | num_style = easyxf("borders: left thin, right thin, top thin, bottom thin;" "align: vertical center,wrap off, " 212 | "horizontal center;", num_format_str='#,##0.0000') 213 | str_style = easyxf("borders: left thin, right thin, top thin, bottom thin;" "align: vertical center, wrap off," 214 | "horizontal center;") 215 | len_x = len(self.x_names) 216 | gd_xls = xlwt.Workbook() 217 | ws_input = gd_xls.add_sheet('Input data') 218 | ws_input = self._xls_write_df(df=self.data, worksheet=ws_input) 219 | # risk-detector 220 | ws_risk = gd_xls.add_sheet('Risk detector') 221 | risk_data = self.risk_detector 222 | row, col = 0, 0 223 | for x in self.x_names: 224 | x_risk = risk_data[x]['risk'] 225 | x_ttest = risk_data[x]['ttest_stra'] 226 | len_strata = len(x_risk) 227 | ws_risk.write_merge(row, row, col, col + len_strata, x + ': risk') 228 | ws_risk.write_merge(row + 4, row + 4, col, col + len_strata, x + ' t-test: 0.05') 229 | ws_risk.write(row + 5, 0, style=str_style) 230 | 231 | for i in range(len_strata): 232 | ws_risk.write(row + 1, i + col, x_risk.index[i], style=str_style) 233 | ws_risk.write(row + 2, i + col, x_risk[i], style=num_style) 234 | # ws_risk = self._xls_write_df(df=self.data, worksheet=ws_input) 235 | 236 | ws_risk.write(row + 5, col + i + 1, str(x_risk.index[i]), style=str_style) 237 | ws_risk.write(row + 6 + i, col, str(x_risk.index[i]), style=str_style) 238 | for j in range(i): 239 | ws_risk.write(row + 6 + i, col + j + 1, bool(x_ttest.iloc[i, j]), style=str_style) 240 | for spi in range(i, len_strata): 241 | ws_risk.write(row + 6 + i, 
col + spi + 1, style=str_style) 242 | 243 | row = row + len_strata + 8 244 | 245 | # factor-detector 246 | ws_fact = gd_xls.add_sheet('Factor detector') 247 | ws_fact = self._xls_write_df(df=self.factor_detector, worksheet=ws_fact) 248 | # interaction detector 249 | ws_inter = gd_xls.add_sheet('Interaction detector') 250 | if len_x > 1: 251 | inter_q = self.interaction_detector['q'] 252 | inter_sig = self.interaction_detector['sig'] 253 | interactions = self.interaction_detector['interaction'] 254 | ws_inter.write_merge(0, 0, 0, 0 + len_x, "q-statistic") 255 | ws_inter.write(1, 0, style=str_style) 256 | ws_inter.write_merge(len_x + 3, len_x + 3, 0, 0 + len_x, "Sig F test: 0.05") 257 | ws_inter.write(len_x + 4, 0, style=str_style) 258 | ws_inter.write_merge(2 * len_x + 6, 2 * len_x + 6, 0, 0, "Interaction between Xs") 259 | ws_inter.write(2 * len_x + 7, 0, style=str_style) 260 | ws_inter.write(2 * len_x + 7, 1, "q-value", style=str_style) 261 | ws_inter.write(2 * len_x + 7, 2, "interaction", style=str_style) 262 | # write q-statistic values 263 | row_inter = 0 264 | for ind, val in enumerate(self.x_names): 265 | ws_inter.write(1, ind + 1, val, style=str_style) 266 | ws_inter.write(ind + 2, 0, val, style=str_style) 267 | ws_inter.write(len_x + 4, ind + 1, val, style=str_style) 268 | ws_inter.write(len_x + ind + 5, 0, val, style=str_style) 269 | for inter_col in range(0, ind): 270 | ws_inter.write(ind + 2, inter_col + 1, inter_q.iloc[ind, inter_col], style=num_style) 271 | ws_inter.write(ind + len_x + 5, inter_col + 1, inter_sig.iloc[ind, inter_col], style=num_style) 272 | for sp_j in range(ind, len_x): 273 | ws_inter.write(ind + 2, sp_j + 1, style=str_style) 274 | ws_inter.write(ind + len_x + 5, sp_j + 1, style=str_style) 275 | 276 | for ind2, val2 in enumerate(self.x_names[ind + 1:]): 277 | ws_inter.write(2 * len_x + row_inter + 8, 0, val + "&" + val2, style=str_style) 278 | ws_inter.write(2 * len_x + row_inter + 8, 1, interactions.iloc[row_inter, 0], 
style=num_style) 279 | ws_inter.write(2 * len_x + row_inter + 8, 2, interactions.iloc[row_inter, 1], style=num_style) 280 | row_inter = row_inter + 1 281 | # insert image 282 | img_bmp = (os.path.join(gd_path, 'interaction_fig/interaction.bmp')) 283 | fig_row = 2 * len_x + row_inter + 11 284 | ws_inter.insert_bitmap(img_bmp, fig_row, 0) 285 | 286 | # ecological detector 287 | ws_ecol = gd_xls.add_sheet('Ecological detector') 288 | ws_ecol.write_merge(1, 1, 0, 3, "Sig. F-test: 0.05") 289 | eco_df = self.ecological_detector 290 | ws_ecol.write(2, 0, style=str_style) 291 | for ind, val in enumerate(self.x_names): 292 | ws_ecol.write(2, 0 + ind + 1, val, style=str_style) 293 | ws_ecol.write(ind + 3, 0, val, style=str_style) 294 | for j in range(ind): 295 | ws_ecol.write(ind + 3, j + 1, bool(eco_df.iloc[ind, j]), style=str_style) 296 | for sp_j in range(ind, len_x): 297 | ws_ecol.write(ind + 3, sp_j + 1, style=str_style) 298 | 299 | gd_xls.save(save_file) 300 | 301 | def print_result(self, printFile=None): 302 | 303 | if printFile is None: 304 | self._print_result() 305 | else: 306 | with open(printFile, 'w') as outfile: 307 | self._print_result(printFile=outfile) 308 | 309 | def _print_result(self, printFile=None): 310 | """ 311 | """ 312 | risk_result = self.risk_detector 313 | len_x = len(self.x_names) 314 | print("-------------------risk results--------------------", file=printFile) 315 | for i in self.x_names: 316 | print("%s - risk :" % i, file=printFile) 317 | print(risk_result[i]['risk'], file=printFile) 318 | print("%s - ttest :" % i, file=printFile) 319 | print(risk_result[i]['ttest_stra'], file=printFile) 320 | print('------------------factor detector------------------', file=printFile) 321 | print(self.factor_detector, file=printFile) 322 | 323 | if len_x > 1: 324 | print('--------------interaction detector-----------------', file=printFile) 325 | print(self.interaction_detector['q'], file=printFile) 326 | print(self.interaction_detector['interaction'], 
file=printFile) 327 | print('--------------ecological detector------------------', file=printFile) 328 | print(self.ecological_detector, file=printFile) 329 | 330 | @staticmethod 331 | def _xls_write_df(df, worksheet, startrow=0, startcol=0): 332 | col_names = df.columns.values 333 | index = df.index 334 | 335 | for col_ind, col_name in enumerate(col_names): 336 | worksheet.write(startrow, startcol + col_ind + 1, col_name) 337 | for row_data, cell in enumerate(df[col_name]): 338 | worksheet.write(startrow + row_data + 1, startcol + col_ind + 1, cell) 339 | 340 | for index_ind, index_name in enumerate(index): 341 | worksheet.write(startrow + 1 + index_ind, startcol, index_name) 342 | 343 | return worksheet 344 | 345 | 346 | if __name__ == '__main__': 347 | testdata = pd.read_csv("../data/collectdata.csv") 348 | gd_result = GeoDetector(testdata, ["watershed", "soiltype", "elevation"], "incidence", alpha=0.05) 349 | gd_result.save_to_xls("test.xls") 350 | -------------------------------------------------------------------------------- /gd_core/interaction_fig/interaction.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsnrguo/QGIS-Geographical-detector/a13a14be3ca952ed9ae170d08d486e5217f6a591/gd_core/interaction_fig/interaction.bmp -------------------------------------------------------------------------------- /gd_core/optimal_stratification.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2021/12/29 16:44 4 | # @Author : gjg 5 | # @Site : 6 | # @File : optimal_gd.py 7 | # @Software: PyCharm 8 | import warnings 9 | 10 | import numpy as np 11 | import pandas as pd 12 | from math import log 13 | from scipy import stats 14 | from copy import deepcopy 15 | 16 | 17 | # import pandas as pd 18 | 19 | 20 | class optimal_geo_detector: 21 | """ 22 | 23 | """ 24 | 25 | def __init__(self, x, y, 26 | 
criterion="squared_error", 27 | max_group=None, 28 | min_group=1, 29 | min_samples_group=2, 30 | cv_seed=None, 31 | min_delta_q=0.001, 32 | cv_fold=10, 33 | cv_times=1, 34 | lst_alpha=0.05, 35 | equal_data=None, 36 | equ_threshold=0, 37 | max_sample=None, 38 | sample_random_seed=None, 39 | ccp_alpha=0.0): 40 | """ 41 | 42 | Args: 43 | x: 44 | y: 45 | criterion: 46 | max_group: 47 | min_group: 48 | min_samples_group: 49 | cv_seed: 50 | min_delta_q: 51 | cv_fold: 52 | cv_times: 53 | lst_alpha: 54 | equal_data: equality data 55 | equ_threshold: equality ratio in each groups, [0,1] 56 | ccp_alpha: 57 | """ 58 | 59 | self.equ_threshold = equ_threshold 60 | if (cv_times > 1) & (cv_seed is not None): 61 | self.cv_seed = [cv_seed + i for i in range(cv_times)] 62 | else: 63 | self.cv_seed = [cv_seed] 64 | self.criterion = criterion 65 | self.min_group = min_group 66 | 67 | # 68 | len_pop = len(y) 69 | self.pop_y = y 70 | self.pop_x = x 71 | 72 | if max_sample is not None: 73 | sam_rng = np.random.default_rng(seed=sample_random_seed) 74 | sam_index = sam_rng.choice(range(len_pop), size=max_sample, replace=False) 75 | self.x = x[sam_index] 76 | self.y = y[sam_index] 77 | self.y = y[sam_index] 78 | self.equal_data = equal_data[sam_index] 79 | else: 80 | self.x = x 81 | self.y = y 82 | self.equal_data = equal_data 83 | 84 | if max_group is None: 85 | self.max_group = int(len(self.y) / min_samples_group) 86 | else: 87 | self.max_group = max_group 88 | 89 | self.min_samples_split = min_samples_group * 3 90 | self.min_samples_group = min_samples_group 91 | self.cv = cv_fold 92 | self.cv_time = cv_times 93 | if min_group == max_group: 94 | self.fix_group = min_group 95 | else: 96 | self.fix_group = None 97 | 98 | if self.fix_group is None: 99 | self.lst_alpha = lst_alpha 100 | else: 101 | self.lst_alpha = 1 102 | if self.fix_group * min_samples_group > len(self.y): 103 | warnings.warn('The fixed number of groups is too lager or the min_samples in groups is too larger') 104 | 105 | 
self.ccp_alpha = ccp_alpha 106 | self.ccp_alpha_ls = [] 107 | self.min_delta_q = min_delta_q 108 | 109 | self.sst = np.sum(pow(self.y - np.mean(self.y), 2)) 110 | 111 | self.split_group, self.metric = self.split(x=self.x, y=self.y, equal_data=self.equal_data) 112 | if len(set(self.split_group)) < self.max_group: 113 | self.max_group = len(set(self.split_group)) 114 | 115 | if (self.max_group != self.min_group) & (self.criterion == 'squared_error'): 116 | self.alpha_list = self.lost_complexity_info() 117 | else: 118 | self.alpha_list = pd.DataFrame([]) 119 | 120 | self.groups = self.stratification() 121 | if max_sample is not None: 122 | self.groups = self.predict_strata(self.groups, self.x, self.pop_x) 123 | 124 | self.group_interval, self.sort_labs, self.lab_info = self.groups2interval() 125 | 126 | # @property 127 | def stratification(self): 128 | """ 129 | stratification function 130 | """ 131 | metric = self.metric 132 | f_node = metric['node'].to_numpy() 133 | delta_q = metric['delta_q'].to_numpy() 134 | groups_best = self.split_group 135 | # cc_lost_list = dict.fromkeys(['del_node', 'num_group', 'q_list', 'sse', 'cc_lost_best']) 136 | if self.max_group == self.min_group: 137 | if self.criterion == 'squared_error': 138 | groups_best, cc_lost_list = self.fix_pruning_group(self.split_group, f_node, delta_q) 139 | if (self.criterion == 'linear_statistic') & (self.max_group < len(set(self.split_group))): 140 | groups_best, cc_lost_list = self.fix_pruning_group(self.split_group, f_node, delta_q) 141 | else: 142 | if self.criterion == 'squared_error': 143 | if self.alpha_list.shape[0] > 1: 144 | groups_best = self.alpha_list.loc[self.alpha_list['cv_error'] == min(self.alpha_list['cv_error']), 145 | 'group'] 146 | groups_best = groups_best.values[-1] 147 | else: 148 | groups_best = self.alpha_list['group'].values[-1] 149 | 150 | return groups_best 151 | 152 | def split(self, x, y, equal_data=None): 153 | """ 154 | split the x into groups minimizing the y 155 | Args: 
156 | equal_data: equality data 157 | x: 158 | y: 159 | 160 | Returns: 161 | groups: the group labels of unit 162 | metric_list: the split information 163 | 164 | """ 165 | rank_index = np.argsort(x) 166 | y = y[rank_index] 167 | x = x[rank_index] 168 | if equal_data is not None: 169 | equal_data = equal_data[rank_index] 170 | alt_cut = np.diff(np.insert(x, 0, x[0])) != 0 # the same x should not be split 171 | group_list = [np.arange(len(y))] 172 | groups = np.ones(len(y)).astype(int) 173 | group_lab = [1] # initial group lab 174 | metric_list = list() 175 | len_new_node = 1 176 | # delta_q = 1 177 | while len_new_node > 0: 178 | new_node = list() 179 | lab_index = 0 # initial group labels index 180 | update_lab = list() 181 | for group_index in group_list: 182 | y_len = len(group_index) 183 | y_val = np.mean(y[group_index]) 184 | y_val_var = np.var(y[group_index]) / y_len 185 | sst = np.sum(pow((y[group_index]) - y_val, 2)) 186 | if len(group_index) >= self.min_samples_split: 187 | if (equal_data is not None) and (self.equ_threshold > 0): 188 | equal_group = equal_data[group_index] 189 | else: 190 | equal_group = None 191 | sub_cut, metric = self._split_cut(y[group_index], alt_cut[group_index], pop_data=equal_group) 192 | if sub_cut is not None: 193 | delta_q = (sst - metric) / self.sst 194 | local_q = (sst - metric) / sst 195 | if delta_q < self.min_delta_q: 196 | node_type = "leaf_node" 197 | x_cut = np.nan 198 | else: 199 | node_type = "split_node" 200 | x_cut = x[group_index][sub_cut] 201 | new_node.append(group_index[:sub_cut]) 202 | new_node.append(group_index[sub_cut:]) 203 | update_lab.append(2 * group_lab[lab_index]) # the new left label after split 204 | update_lab.append(2 * group_lab[lab_index] + 1) # the new right label after split 205 | # update groups 206 | groups[group_index[:sub_cut]] = 2 * group_lab[lab_index] 207 | groups[group_index[sub_cut:]] = 2 * group_lab[lab_index] + 1 208 | 209 | metric_list.append( 210 | (group_lab[lab_index], sst, 
delta_q, x_cut, y_len, y_val, y_val_var, node_type, local_q)) 211 | else: # alt_cut point is null 212 | x_cut = np.nan 213 | local_q = np.nan 214 | node_type = "leaf_node" 215 | delta_q = self.min_delta_q 216 | metric_list.append( 217 | (group_lab[lab_index], sst, delta_q, x_cut, y_len, y_val, y_val_var, node_type, local_q)) 218 | else: 219 | x_cut = np.nan 220 | local_q = np.nan 221 | node_type = "leaf_node" 222 | delta_q = self.min_delta_q 223 | metric_list.append( 224 | (group_lab[lab_index], sst, delta_q, x_cut, y_len, y_val, y_val_var, node_type, local_q)) 225 | 226 | lab_index += 1 227 | 228 | len_new_node = len(new_node) 229 | group_lab = update_lab 230 | group_list = new_node 231 | # group_num = len(set(groups)) 232 | 233 | metric_dict = dict.fromkeys(['node', 'sst', 'delta_q', 'next_split', 'length', 'estimate', 'est_var', 234 | 'node_type', 'local_q']) 235 | metric_dict['node'] = np.array([i[0] for i in metric_list], dtype=int) 236 | metric_dict['sst'] = np.array([i[1] for i in metric_list]) 237 | metric_dict['delta_q'] = np.array([i[2] for i in metric_list]) 238 | metric_dict['next_split'] = np.array([i[3] for i in metric_list]) 239 | metric_dict['length'] = np.array([i[4] for i in metric_list]) 240 | metric_dict['estimate'] = np.array([i[5] for i in metric_list]) 241 | metric_dict['est_var'] = np.array([i[6] for i in metric_list]) 242 | metric_dict['node_type'] = np.array([i[7] for i in metric_list]) 243 | metric_dict['local_q'] = np.array([i[8] for i in metric_list]) 244 | 245 | recover_rank = np.argsort(rank_index) 246 | return groups[recover_rank], pd.DataFrame(metric_dict) 247 | 248 | def fix_pruning_group(self, groups, nodes, delta_q): 249 | """ 250 | 251 | Args: 252 | groups: list 253 | nodes: list 254 | delta_q: the delta-q-statistic of each split 255 | 256 | Returns: 257 | groups, the resultant groups 258 | group_info, the pruning information 259 | 260 | """ 261 | cc_lost = np.array(delta_q) * self.sst 262 | terminal_node = list(set(groups)) 263 
| terminal_node.sort() 264 | group_num = len(terminal_node) 265 | alt_pruning = (np.array(terminal_node) / 2).astype(int) 266 | pruning_node = alt_pruning[:-1][np.diff(alt_pruning) == 0] 267 | 268 | del_node_ls = [0] 269 | num_group = [group_num] 270 | q_list = [np.sum(delta_q)] 271 | while group_num > self.min_group: 272 | alt_del_index = np.in1d(nodes, pruning_node) 273 | alt_index = np.arange(len(nodes))[alt_del_index] 274 | del_index = np.argmin(cc_lost[alt_index]) 275 | del_node = pruning_node[del_index] 276 | c1 = groups == del_node * 2 277 | c2 = groups == del_node * 2 + 1 278 | merge_index = [c1[i] or c2[i] for i in range(len(c1))] 279 | groups[merge_index] = del_node 280 | # update node, split_metric cc_lost and pruning_node 281 | del_index_raw = alt_index[del_index] 282 | nodes = np.delete(nodes, del_index_raw) 283 | delta_q = np.delete(delta_q, del_index_raw) 284 | cc_lost = np.delete(cc_lost, del_index_raw) 285 | pruning_node = np.delete(pruning_node, del_index) 286 | # update terminal_node 287 | terminal_node.remove(del_node * 2) 288 | terminal_node.remove(del_node * 2 + 1) 289 | terminal_node.insert(0, del_node) 290 | 291 | # update pruning_node 292 | if del_node % 2 == 0: 293 | if (del_node + 1) in terminal_node: 294 | pruning_node = np.append(pruning_node, int(del_node / 2)) 295 | else: 296 | if (del_node - 1) in terminal_node: 297 | pruning_node = np.append(pruning_node, int(del_node / 2)) 298 | 299 | pruning_node.sort() 300 | del_node_ls.append(del_node) 301 | num_group.append(group_num - 1) 302 | q_list.append(np.sum(delta_q)) 303 | 304 | group_num -= 1 305 | 306 | sse = self.sst * (1 - np.array(q_list)) 307 | cc_lost_best = sse + self.ccp_alpha * np.array(num_group) 308 | group_info = {"del_node": del_node_ls, "num_group": num_group, 309 | "q_list": q_list, 310 | "sse": sse, 311 | "cc_lost_best": cc_lost_best} 312 | 313 | return groups, group_info 314 | 315 | def lost_complexity_info(self): 316 | """ 317 | lost complexity information 318 | 319 | 
Returns: 320 | pandas dataframe: 321 | node: the merging node 322 | group_num: the number of group after merging the node 323 | merging_in_node: inner node of node and leaf node, contains the node itself 324 | groups: groups after merging the node 325 | cv_error: rmse of k-fold cv 326 | cv_std: std of k-fold, it is std(rmse)/k-fold 327 | """ 328 | split_info = self.metric 329 | nodes = split_info.loc[split_info['node_type'] == 'split_node', 'node'].to_numpy() 330 | groups = deepcopy(self.split_group) 331 | alpha_list = [0] 332 | group_ls = [self.split_group] 333 | group_num = [len(set(groups))] 334 | num_group = group_num[0] 335 | merging_node = [0] 336 | merge_in_node = [0] 337 | sse = self.sum_squared_error(groups, self.y) 338 | q_list = [1 - sse / self.sst] 339 | 340 | while num_group > self.min_group: 341 | merge_node, alpha, del_node, merging_index = self._regula_para(nodes, groups, self.y, self.sst) 342 | if merge_node == 0: 343 | break 344 | else: 345 | nodes = list(set(nodes) - del_node) 346 | groups[merging_index] = merge_node 347 | num_group = len(set(groups)) 348 | if num_group <= self.max_group: 349 | merging_node.append(merge_node) 350 | merge_in_node.append(del_node) 351 | alpha_list.append(alpha) 352 | group_ls.append(deepcopy(groups)) 353 | group_num.append(num_group) 354 | sse = self.sum_squared_error(groups, self.y) 355 | q_list.append(1 - sse / self.sst) 356 | 357 | if len(set(self.split_group)) > self.max_group: 358 | alpha_info = dict(node=merging_node[1:], q_value=q_list[1:], group_num=group_num[1:], 359 | merge_in_node=merge_in_node[1:], 360 | alpha=alpha_list[1:], group=group_ls[1:]) 361 | else: 362 | alpha_info = dict(node=merging_node, q_value=q_list, group_num=group_num, merge_in_node=merge_in_node, 363 | alpha=alpha_list, group=group_ls) 364 | 365 | alpha_info = pd.DataFrame(alpha_info) 366 | alpha_info.sort_values(by=['alpha']) 367 | if alpha_info.shape[0] > 1: 368 | cv_alpha = alpha_info['alpha'].to_numpy() 369 | cv_error = 0 370 | 
cv_std = 0 371 | for cv in range(self.cv_time): 372 | if self.cv_seed is None: 373 | cv_info = self.lc_cv_info(cv_alpha) 374 | else: 375 | cv_info = self.lc_cv_info(cv_alpha, seed=self.cv_seed[cv]) 376 | cv_error += np.array(cv_info['cv_error']) 377 | cv_std += np.array(cv_info['cv_std']) 378 | alpha_info['cv_error'] = cv_error / self.cv_time 379 | alpha_info['cv_std'] = cv_std / self.cv_time 380 | 381 | return alpha_info 382 | 383 | def lc_cv_info(self, cv_alpha, seed=None): 384 | """ 385 | the alpha value of lost-complexity k-fold cross-validation information 386 | Args: 387 | cv_alpha: cv parameter 388 | seed: random seed 389 | 390 | Returns: 391 | cv precision 392 | root mean of squared error 393 | std of k-fold root mean of squared error 394 | 395 | """ 396 | len_y = len(self.y) 397 | interval = int(np.around(len_y / self.cv)) 398 | # cv 399 | rng = np.random.default_rng(seed=seed) 400 | random_index = rng.choice(range(len_y), size=len_y, replace=False) 401 | cv_cut = [range(i * interval, (i + 1) * interval) for i in range(self.cv - 1)] 402 | cv_cut.append(range(interval * (self.cv - 1), len_y)) 403 | cv_index = [random_index[i] for i in cv_cut] 404 | option_cut = range(len_y) 405 | 406 | root_mse_info = [] 407 | root_mse_std = [] 408 | 409 | for alpha in cv_alpha: 410 | root_mse = [] 411 | for i in cv_index: 412 | select = np.in1d(option_cut, i) 413 | train_y, test_y = self.y[~select], self.y[select] 414 | train_x, test_x = self.x[~select], self.x[select] 415 | if self.equal_data is not None: 416 | pop_train = self.equal_data[~select] 417 | else: 418 | pop_train = None 419 | groups, split_info = self.split(train_x, train_y, equal_data=pop_train) 420 | nodes = split_info.loc[split_info['node_type'] == 'split_node', 'node'].to_numpy() 421 | cut_groups = self._cv_info(train_y, groups, nodes, alpha) 422 | mse_i = self.predict(cut_groups, train_x, train_y, test_x, test_y) 423 | root_mse.append(pow(mse_i, 0.5)) 424 | 425 | mean_cv = np.mean(root_mse) 426 | std_cv = 
np.std(root_mse) / pow(self.cv, 0.5) 427 | 428 | root_mse_info.append(mean_cv) 429 | root_mse_std.append(std_cv) 430 | 431 | return dict(cv_error=root_mse_info, cv_std=root_mse_std) 432 | 433 | def _cv_info(self, train_y, groups, nodes, cut_alpha): 434 | """ 435 | 436 | Args: 437 | train_y: 438 | groups: 439 | nodes: 440 | cut_alpha: the preset regularization parameter 441 | 442 | Returns: 443 | 444 | """ 445 | 446 | mean_y = np.mean(train_y) 447 | sst = np.sum(pow((train_y - mean_y), 2)) 448 | group_num = len(set(groups)) 449 | while group_num > self.min_group: 450 | merge_node, alpha, del_node, merging_index = self._regula_para(nodes, groups, train_y, sst) 451 | if merge_node == 0: 452 | break 453 | else: 454 | if alpha > cut_alpha: 455 | break 456 | nodes = list(set(nodes) - del_node) 457 | groups[merging_index] = merge_node 458 | group_num = len(set(groups)) 459 | 460 | return groups 461 | 462 | def _regula_para(self, nodes, groups, y, sst): 463 | """ 464 | 465 | Args: 466 | nodes: 467 | groups: 468 | y: 469 | sst: 470 | 471 | Returns: 472 | 473 | """ 474 | alpha = np.inf 475 | merge_node = 0 476 | merging_index = [] 477 | del_node = [] 478 | group_num = len(set(groups)) 479 | for node in nodes[1:]: 480 | _, del_node_i, group_index = self.sub_node(node, groups, nodes) 481 | alpha_temp = self._lc_ssb(groups[group_index], y[group_index]) 482 | alpha_temp = alpha_temp / sst 483 | group_temp = group_num - len(set(groups[group_index])) + 1 484 | if group_temp >= self.min_group: 485 | if (alpha_temp < alpha) or ((alpha == alpha_temp) & (node > merge_node)): 486 | merge_node = node 487 | alpha = alpha_temp 488 | del_node = del_node_i 489 | merging_index = group_index 490 | 491 | return merge_node, alpha, del_node, merging_index 492 | 493 | @staticmethod 494 | def sub_node(node, groups, all_node): 495 | """ 496 | extract the leaf node and its index, child-node and grandchild-node of a given father node 497 | Args: 498 | node: inner node 499 | groups: 500 | all_node: 
501 | 502 | Returns: 503 | leaves: list, the leaves of node 504 | sub_in_node: set, the inner node of node 505 | sub_node_index: array, the index of node groups 506 | 507 | """ 508 | row_index = np.arange(len(groups)) 509 | max_depth = int(log(np.max(groups), 2)) 510 | node_depth = int(log(node, 2)) 511 | group_node = [node] 512 | for i in range(node_depth + 1, max_depth + 1): 513 | diff_dep = i - node_depth 514 | group_node = np.append(group_node, 515 | np.arange(node * pow(2, diff_dep), node * pow(2, diff_dep) + pow(2, diff_dep))) 516 | 517 | group_node = set(group_node) 518 | # print(group_node) 519 | leaves = list(group_node & set(groups)) 520 | # leaves is none 521 | sub_in_node = group_node & set(all_node) 522 | sub_node_index = row_index[groups == leaves[0]] 523 | for i in leaves[1::]: 524 | sub_node_index = np.append(sub_node_index, row_index[groups == i]) 525 | 526 | return leaves, sub_in_node, sub_node_index 527 | 528 | # @staticmethod 529 | def _lc_ssb(self, node_groups, node_y): 530 | """ 531 | The between sum squared (ssb) error after running merging procedure 532 | cc_alpha = (sst(node) - sse(node))/((len(node_groups) - 1)* sst(all)) 533 | Args: 534 | node_groups: sub—group 535 | node_y: 536 | 537 | Returns: 538 | cc_alpha: 539 | 540 | """ 541 | mean_y = np.mean(node_y) 542 | sst = np.sum(pow((node_y - mean_y), 2)) 543 | sse = self.sum_squared_error(node_groups, node_y) 544 | lc_ssb = (sst - sse) / (len(set(node_groups)) - 1) # 545 | 546 | return lc_ssb 547 | 548 | @staticmethod 549 | def sum_squared_error(groups, y): 550 | """ 551 | sum squared error of grouping 552 | Args: 553 | groups: 554 | y: 555 | 556 | Returns: 557 | sse: sum squared error 558 | 559 | """ 560 | group_labs = np.unique(groups) 561 | sse = 0 562 | for i in group_labs: 563 | y_group = y[groups == i] 564 | mean_i = np.mean(y_group) 565 | sse += np.sum(pow((y_group - mean_i), 2)) 566 | 567 | return sse 568 | 569 | # @staticmethod 570 | def predict(self, groups, train_x, train_y, 
test_x, test_y): 571 | """ 572 | prediction accuracy 573 | Args: 574 | groups: the train_x groups 575 | train_x: 576 | train_y: 577 | test_x: 578 | test_y: 579 | 580 | Returns: 581 | MSE 582 | 583 | """ 584 | 585 | test_group = self.predict_strata(groups, train_x, test_x) 586 | # 587 | train_pd = pd.DataFrame({"group": groups, "x": train_x, "train_y": train_y}) 588 | test_df = pd.DataFrame({"group": test_group, "x": test_x, "test_y": test_y}) 589 | pred = train_pd.groupby("group").mean()["train_y"].reset_index() 590 | test_df = test_df.join(pred.set_index("group"), on="group") 591 | test_df["error"] = pow(test_df["test_y"] - test_df["train_y"], 2) 592 | mse = np.sum(test_df["error"]) 593 | 594 | return mse 595 | 596 | def _split_cut(self, split_y, alt_cut, pop_data=None, optimal_met=None): 597 | """ 598 | get the best split cut by exhaustive search 599 | Args: 600 | split_y: the raw index of y after sorting by x 601 | alt_cut: the possible split points that x is not same 602 | optimal_met: None 603 | 604 | Returns: 605 | best_cut: the index of cut point, means the split is x < x[cut] 606 | 607 | """ 608 | # global optimal_met 609 | len_y = len(split_y) 610 | min_sg = self.min_samples_group 611 | max_sg = len_y - self.min_samples_group 612 | 613 | if pop_data is not None: 614 | threshold = np.sum(pop_data) * self.equ_threshold 615 | # the first pos that cumsum > equ_threshold 616 | min_index = np.arange(len_y)[np.cumsum(pop_data) >= threshold][0] 617 | max_index = len_y - np.arange(len_y)[np.cumsum(pop_data[::-1]) >= threshold][0] 618 | min_sg = np.max([min_sg, min_index]) 619 | max_sg = np.min([max_sg, max_index]) 620 | 621 | split_index = np.arange(min_sg, max_sg) 622 | alt_cut = alt_cut[split_index] 623 | split_index = split_index[alt_cut] 624 | 625 | if self.criterion == 'squared_error': 626 | optimal_met = np.inf 627 | elif self.criterion == "linear_statistic": 628 | optimal_met = self.lst_alpha 629 | best_cut = None 630 | # @nb.jit() # using numba accelerate 
631 | for cut in split_index: 632 | metric = self._criterion_metric(cut, split_y) 633 | if metric < optimal_met: 634 | optimal_met = metric 635 | best_cut = cut 636 | 637 | return best_cut, optimal_met 638 | 639 | def _criterion_metric(self, cut, y): 640 | """ 641 | criterion of split 642 | Args: 643 | cut: cut the raw data into 2 split 644 | y: the data 645 | 646 | Returns: 647 | metric: the metric index 648 | 649 | """ 650 | metric = None 651 | if self.criterion == 'squared_error': 652 | metric = np.sum(pow(y[0:cut] - np.mean(y[0:cut]), 2)) + np.sum(pow(y[cut::] - np.mean(y[cut::]), 2)) 653 | 654 | if self.criterion == "linear_statistic": 655 | tx = np.ones(len(y)) 656 | tx[cut::] = 0 657 | _, metric = self._linear_statistic(tx, y) 658 | 659 | return metric 660 | 661 | @staticmethod 662 | def _linear_statistic(tx, fy): 663 | """ 664 | linear statistics of MC 665 | Args: 666 | tx: transform function of x 667 | fy: influence function of y 668 | 669 | Returns: 670 | 671 | """ 672 | len_y = len(fy) 673 | t = np.dot(tx, fy) 674 | e_h = np.mean(fy) 675 | e_t = np.sum(tx) * e_h 676 | sigma_h = np.dot(fy - e_h, fy - e_h) / len_y 677 | sigma_t = len_y / (len_y - 1) * sigma_h * np.dot(tx, tx) - 1 / (len_y - 1) * sigma_h * pow(np.sum(tx), 2) 678 | test_t = abs((t - e_t) / pow(sigma_t, 0.5)) 679 | two_side_p = stats.norm.sf(test_t) * 2 # 1d-cdf 680 | return test_t, two_side_p 681 | 682 | def groups2interval(self): 683 | """ 684 | convert the groups into interval 685 | Returns: 686 | 687 | """ 688 | rank_index = np.argsort(self.pop_x) 689 | recover_rank = np.argsort(rank_index) 690 | group_x = self.pop_x[rank_index] 691 | group_info = self.groups[rank_index] 692 | labels, in1, in2, counts = np.unique(group_info, return_index=True, return_inverse=True, return_counts=True, 693 | axis=None) 694 | breaks = group_x[in1] 695 | breaks_sort = np.argsort(breaks) 696 | breaks_recover = np.argsort(breaks_sort) 697 | edges = np.append(breaks[breaks_sort], group_x[-1]) 698 | 699 | if 
type(breaks[0]) is int: 700 | edges = [str(x) for x in edges] 701 | else: 702 | edges = ["{:.2f}".format(x) for x in edges] 703 | max_width = max([len(edge) for edge in edges]) 704 | k = len(edges) - 1 705 | left = ["["] 706 | left.extend("[" * (k - 1)) 707 | right = ")" * (k - 1) + "]" 708 | lower = ["{:>{width}}".format(edges[i], width=max_width) for i in range(k)] 709 | upper = ["{:>{width}}".format(edges[i], width=max_width) for i in range(1, k + 1)] 710 | lower = [l + r for l, r in zip(left, lower)] 711 | upper = [l + r for l, r in zip(upper, right)] 712 | intervals = [l + ", " + r for l, r in zip(lower, upper)] 713 | sort_lab = np.arange(1, len(labels) + 1) 714 | intervals = np.array(intervals)[breaks_recover] 715 | sort_lab = sort_lab[breaks_recover] 716 | lab_info = pd.DataFrame(dict(node=labels, num_lab=sort_lab, intervals=intervals, counts=counts)) 717 | 718 | inter_labs = intervals[in2] 719 | sort_labs = sort_lab[in2] 720 | 721 | return inter_labs[recover_rank], sort_labs[recover_rank], lab_info 722 | 723 | @staticmethod 724 | def predict_strata(group_x, x, new_x): 725 | """ 726 | predict strata labels based on sampling data 727 | Returns: 728 | groups 729 | 730 | """ 731 | low_breaks = list() 732 | up_breaks = list() 733 | group_labs, group_index = np.unique(group_x, return_index=True) 734 | for i in group_labs: 735 | x_group = x[group_x == i] 736 | low_breaks.append(np.min(x_group)) 737 | up_breaks.append(np.max(x_group)) 738 | 739 | low_breaks = np.array(low_breaks) 740 | low_breaks.sort() 741 | up_breaks = np.array(up_breaks) 742 | up_breaks.sort() 743 | # update the breaks, the interval is (-inf, b1), [b1,b2),..., [bk,inf) 744 | breaks = (low_breaks[1:] + up_breaks[:-1]) / 2 745 | breaks = breaks.astype(float) 746 | breaks = np.insert(breaks, 0, -np.inf) 747 | breaks = np.append(breaks, np.max([np.max(x), np.max(new_x)])) 748 | new_x_labs = pd.cut(np.array(new_x), breaks) 749 | x_labs = pd.cut(np.array(x), breaks) 750 | group_dict = 
dict(zip(x_labs[group_index], group_labs)) 751 | # new_x_labs.replace() 752 | pred_group = pd.Series(new_x_labs).replace(group_dict) 753 | return pred_group.to_numpy() 754 | 755 | 756 | if __name__ == '__main__': 757 | testdata = pd.read_csv("../data/test.csv") 758 | x = testdata['ave_temp'].to_numpy() 759 | y = testdata['ave_ndvi'].to_numpy() 760 | pop_data0 = testdata['ave_prec'].to_numpy() 761 | gs_result = optimal_geo_detector(x, y, 762 | criterion="squared_error", 763 | max_group=10, 764 | min_group=3, 765 | min_samples_group=50, 766 | cv_seed=1215, 767 | min_delta_q=0.001, 768 | cv_fold=10, 769 | equal_data=pop_data0, 770 | max_sample=1000, 771 | sample_random_seed=101125, 772 | ccp_alpha=0.0) 773 | print(gs_result.groups) 774 | -------------------------------------------------------------------------------- /geographical_detector.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | /*************************************************************************** 5 | Geo_detector 6 | A QGIS plugin 7 | This plugin adds an algorithm to measure the spatial stratified heter 8 | Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ 9 | ------------------- 10 | begin : 2021-12-21 11 | copyright : (C) 2021 by Guojg 12 | email : guojg@lreis.ac.cn 13 | ***************************************************************************/ 14 | 15 | /*************************************************************************** 16 | * * 17 | * This program is free software; you can redistribute it and/or modify * 18 | * it under the terms of the GNU General Public License as published by * 19 | * the Free Software Foundation; either version 2 of the License, or * 20 | * (at your option) any later version. 
* 21 | * * 22 | ***************************************************************************/ 23 | """ 24 | 25 | __author__ = 'Guojg' 26 | __date__ = '2021-12-21' 27 | __copyright__ = '(C) 2021 by Guojg' 28 | 29 | # This will get replaced with a git SHA1 when you do a git archive 30 | 31 | __revision__ = '$Format:%H$' 32 | 33 | import os 34 | import sys 35 | import inspect 36 | 37 | from qgis import processing 38 | from qgis.PyQt.QtWidgets import QAction 39 | from qgis.PyQt.QtGui import QIcon 40 | 41 | from qgis.core import QgsApplication 42 | from .geographical_detector_provider import Geo_detectorProvider 43 | 44 | cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0] 45 | 46 | if cmd_folder not in sys.path: 47 | sys.path.insert(0, cmd_folder) 48 | 49 | 50 | class Geo_detectorPlugin(object): 51 | 52 | def __init__(self, iface): 53 | self.provider = None 54 | self.iface = iface 55 | 56 | def initProcessing(self): 57 | """Init Processing provider for QGIS >= 3.8.""" 58 | self.provider = Geo_detectorProvider() 59 | QgsApplication.processingRegistry().addProvider(self.provider) 60 | 61 | def initGui(self): 62 | self.initProcessing() 63 | 64 | icon = os.path.join(os.path.join(cmd_folder, 'icon.png')) 65 | self.action = QAction( 66 | QIcon(icon), 67 | u"Geographical detector", self.iface.mainWindow()) 68 | self.action.triggered.connect(self.run) 69 | self.iface.addPluginToMenu(u"&Geographical detector", self.action) 70 | self.iface.addToolBarIcon(self.action) 71 | 72 | def unload(self): 73 | # We will also need to add code to the unload method, to remove these elements when plugin is removed. 
74 | QgsApplication.processingRegistry().removeProvider(self.provider) 75 | self.iface.removePluginMenu(u"&Geographical detector", self.action) 76 | self.iface.removeToolBarIcon(self.action) 77 | 78 | def run(self): 79 | processing.execAlgorithmDialog("Geographical detector") 80 | 81 | -------------------------------------------------------------------------------- /geographical_detector_algorithm.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | /*************************************************************************** 5 | Geo_detector 6 | A QGIS plugin 7 | This plugin adds an algorithm to measure the spatial stratified heter 8 | Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ 9 | ------------------- 10 | begin : 2021-12-21 11 | copyright : (C) 2021 by Guojg 12 | email : guojg@lreis.ac.cn 13 | ***************************************************************************/ 14 | 15 | /*************************************************************************** 16 | * * 17 | * This program is free software; you can redistribute it and/or modify * 18 | * it under the terms of the GNU General Public License as published by * 19 | * the Free Software Foundation; either version 2 of the License, or * 20 | * (at your option) any later version. 
* 21 | * * 22 | ***************************************************************************/ 23 | """ 24 | 25 | __author__ = 'Guojg' 26 | __date__ = '2021-12-21' 27 | __copyright__ = '(C) 2021 by Guojg' 28 | 29 | # This will get replaced with a git SHA1 when you do a git archive 30 | 31 | __revision__ = '$Format:%H$' 32 | 33 | # import os 34 | import numpy as np 35 | import pandas as pd 36 | # import pydevd_pycharm 37 | from qgis.PyQt.QtCore import QCoreApplication 38 | # from qgis.core import QgsProcessingException 39 | from qgis.core import (QgsProcessing, 40 | QgsProcessingAlgorithm, 41 | QgsProcessingParameterFeatureSource, 42 | QgsProcessingParameterFileDestination, 43 | QgsProcessingParameterField, 44 | QgsProcessingMultiStepFeedback, 45 | QgsProcessingParameterNumber, 46 | QgsProcessingParameterDefinition, 47 | QgsProcessingException) 48 | 49 | from gd_core import geodetector 50 | from gd_core import optimal_stratification as op_st 51 | 52 | 53 | class Geo_detectorAlgorithm(QgsProcessingAlgorithm): 54 | """ 55 | The geo-detector models 56 | 57 | All Processing algorithms should extend the QgsProcessingAlgorithm 58 | class. 
59 | """ 60 | 61 | OUTPUT = 'OUTPUT' 62 | INPUT = 'INPUT' 63 | VALUES_FIELD_NAME = 'VALUES_FIELD_NAME' 64 | CATEGORICAL_FIELD_NAME = 'CATEGORIES_FIELD_NAME' 65 | NUMERICAL_FIELD_NAME = 'NUMERICAL_FIELD_NAME' 66 | MIN_GROUP = 'MIN_GROUP' 67 | MAX_GROUP = 'MAX_GROUP' 68 | MIN_SAMPLES_GROUP = 'MIN_SAMPLES_GROUP' 69 | MIN_SAMPLES_SPLIT = 'MIN_SAMPLES_SPLIT' 70 | EQUALITY_FIELD_NAME = "EQUALITY_FIELD_NAME" 71 | MINIMUM_RATIO = "MINIMUM_RATIO" 72 | CV_SIZE = 'CV_SIZE' 73 | CV_SEED = 'SV_SEED' 74 | CV_TIMES = 'CV_TIMES' 75 | INTERPOLATION_DATA = 'INTERPOLATION_DATA' 76 | IMPROVING_Q = 'IMPROVING_Q' 77 | MAX_SAMPLE = 'MAX_SAMPLE' 78 | 79 | def __init__(self): 80 | super().__init__() 81 | self.criterion = ["Sum squared error", "Significance test"] 82 | self.strata_num = ['Fixed number', 'Optimal'] 83 | 84 | # self.VALUES_FIELD_NAME = None 85 | 86 | def initAlgorithm(self, config=None): 87 | """ 88 | Here we define the inputs and output of the algorithm, along 89 | with some other properties. 90 | """ 91 | self.addParameter( 92 | QgsProcessingParameterFeatureSource( 93 | self.INPUT, 94 | self.tr('Input layer'), 95 | [QgsProcessing.TypeVectorAnyGeometry] 96 | ) 97 | ) 98 | # add choosing the study field name 99 | self.addParameter(QgsProcessingParameterField(self.VALUES_FIELD_NAME, 100 | self.tr( 101 | 'Study variable'), 102 | parentLayerParameterName=self.INPUT)) 103 | 104 | self.addParameter(QgsProcessingParameterField(self.CATEGORICAL_FIELD_NAME, 105 | self.tr('Field(s) with categories'), 106 | parentLayerParameterName=self.INPUT, 107 | type=QgsProcessingParameterField.Any, allowMultiple=True, 108 | optional=True)) 109 | 110 | # add choosing the field(s) with numerical data 111 | self.addParameter(QgsProcessingParameterField(self.NUMERICAL_FIELD_NAME, 112 | self.tr('Field(s) with numeric'), 113 | parentLayerParameterName=self.INPUT, 114 | type=QgsProcessingParameterField.Any, allowMultiple=True, 115 | optional=True)) 116 | # advanced parameters 117 | max_groups = 
QgsProcessingParameterNumber(self.MAX_GROUP, 118 | self.tr('Maximum number of groups'), minValue=2, defaultValue=20, 119 | optional=True) 120 | max_groups.setFlags(max_groups.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 121 | self.addParameter(max_groups) 122 | 123 | min_groups = QgsProcessingParameterNumber(self.MIN_GROUP, 124 | self.tr('Minimum number of groups'), minValue=1, defaultValue=2, 125 | optional=True) 126 | min_groups.setFlags(min_groups.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 127 | self.addParameter(min_groups) 128 | 129 | min_sample_group = QgsProcessingParameterNumber(self.MIN_SAMPLES_GROUP, 130 | self.tr('Minimum number of samples in a group'), 131 | minValue=2, defaultValue=5, optional=True) 132 | min_sample_group.setFlags(min_sample_group.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 133 | self.addParameter(min_sample_group) 134 | 135 | # add equality constrains field 136 | equality_field = QgsProcessingParameterField(self.EQUALITY_FIELD_NAME, 137 | self.tr('Field for equality constraint'), 138 | parentLayerParameterName=self.INPUT, 139 | type=QgsProcessingParameterField.Any, 140 | optional=True) 141 | equality_field.setFlags(equality_field.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 142 | self.addParameter(equality_field) 143 | 144 | min_ratio = QgsProcessingParameterNumber(self.MINIMUM_RATIO, self.tr('Minimum ratio for equality measure'), 145 | minValue=0, maxValue=1, type=QgsProcessingParameterNumber.Double, 146 | defaultValue=0, optional=True) 147 | min_ratio.setFlags(min_ratio.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 148 | self.addParameter(min_ratio) 149 | 150 | max_sample = QgsProcessingParameterNumber(self.MAX_SAMPLE, self.tr('The number of samples for stratification'), 151 | minValue=0, optional=True) 152 | max_sample.setFlags(max_sample.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 153 | self.addParameter(max_sample) 154 | # self.max_sample = max_sample 155 
| 156 | improving_q = QgsProcessingParameterNumber(self.IMPROVING_Q, self.tr('Minimum threshold for q-value increase'), 157 | minValue=0, maxValue=1, type=QgsProcessingParameterNumber.Double, 158 | defaultValue=0, optional=True) 159 | improving_q.setFlags(min_ratio.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 160 | self.addParameter(improving_q) 161 | 162 | cv_size = QgsProcessingParameterNumber(self.CV_SIZE, self.tr('Cross-validation number'), minValue=2, 163 | defaultValue=10) 164 | cv_size.setFlags(cv_size.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 165 | self.addParameter(cv_size) 166 | 167 | cv_seed = QgsProcessingParameterNumber(self.CV_SEED, self.tr('Cross-validation random state'), minValue=2, 168 | defaultValue=None, optional=True) 169 | cv_seed.setFlags(cv_seed.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 170 | self.addParameter(cv_seed) 171 | 172 | cv_times = QgsProcessingParameterNumber(self.CV_TIMES, 173 | self.tr('Times of repeating cross-validation'), minValue=1, 174 | defaultValue=1, optional=True) 175 | cv_times.setFlags(cv_times.flags() | QgsProcessingParameterDefinition.FlagAdvanced) 176 | self.addParameter(cv_times) 177 | # add a file output of type XLSX 178 | self.addParameter( 179 | QgsProcessingParameterFileDestination( 180 | self.OUTPUT, 181 | self.tr('Output File'), 182 | 'MS Excel format (*.xls);;TXT files (*.txt)', 183 | ) 184 | ) 185 | 186 | # add help/introduction windows 187 | def shortDescription(self): # pylint: disable=missing-docstring 188 | desc_file = "Q-GD “QGIS-geographical detector” is a statistical tool to measure Spatial Stratified " \ 189 | "Heterogeneity(SSH) and test the coupling between two variables Y (Study variable) and X " \ 190 | "(Explanatory variable), according to their SSHs, without assumption of linearity of the " \ 191 | "association. In " \ 192 | "GeoDetector model, the Study variable is numerical variable and the explanatory variable must be " \ 193 | "categorical. 
" \ 194 | "If an explanatory variable is numerical it should be transformed to be categorical." 195 | return self.tr(desc_file) 196 | 197 | def processAlgorithm(self, parameters, context, feedback): 198 | """ 199 | Here is where the processing itself takes place. 200 | """ 201 | source = self.parameterAsSource(parameters, self.INPUT, context) 202 | if source is None: 203 | raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT)) 204 | # 205 | value_field_name = self.parameterAsString(parameters, self.VALUES_FIELD_NAME, context) 206 | category_field_names = self.parameterAsFields(parameters, self.CATEGORICAL_FIELD_NAME, context) 207 | numerical_field_names = self.parameterAsFields(parameters, self.NUMERICAL_FIELD_NAME, context) 208 | output_res = self.parameterAsFileOutput(parameters, self.OUTPUT, context) 209 | 210 | max_group = self.parameterAsInt(parameters, self.MAX_GROUP, context) 211 | min_group = self.parameterAsInt(parameters, self.MIN_GROUP, context) 212 | min_sample = self.parameterAsInt(parameters, self.MIN_SAMPLES_GROUP, context) 213 | pop_field = self.parameterAsString(parameters, self.EQUALITY_FIELD_NAME, context) 214 | pop_threshold = self.parameterAsDouble(parameters, self.MINIMUM_RATIO, context) 215 | inc_q = self.parameterAsDouble(parameters, self.IMPROVING_Q, context) 216 | cv_fold = self.parameterAsInt(parameters, self.CV_SIZE, context) 217 | cv_random_seed = self.parameterAsInt(parameters, self.CV_SEED, context) 218 | cv_rep = self.parameterAsInt(parameters, self.CV_TIMES, context) 219 | stra_samp = self.parameterAsInt(parameters, self.MAX_SAMPLE, context) 220 | 221 | feedback = QgsProcessingMultiStepFeedback(2, feedback) 222 | # update the advance parameters 223 | # import pydevd_pycharm 224 | # pydevd_pycharm.settrace('localhost', port=1010, stdoutToServer=True, stderrToServer=True) 225 | total_sample = source.featureCount() 226 | if max_group == 0: 227 | max_group = None 228 | if min_group == 0: 229 | min_group = 2 230 | 
if min_sample == 0: 231 | min_group = 5 232 | if cv_random_seed == 0: 233 | cv_random_seed = None 234 | if stra_samp > total_sample: 235 | feedback.pushWarning(self.tr('\nWarnings: The number of samples for stratification exceeds maximum, and is ' 236 | 'reset to feature count')) 237 | stra_samp = total_sample 238 | if stra_samp in [0, total_sample]: 239 | stra_samp = None 240 | 241 | # get features from source 242 | total = 100.0 / total_sample if total_sample else 0 243 | features = source.getFeatures() 244 | 245 | if (len(category_field_names) == 0) & (len(numerical_field_names) == 0): 246 | raise ValueError("'Field(s) with categories' and 'Field(s) with numeric' cannot both be empty") 247 | else: 248 | cols = category_field_names + numerical_field_names 249 | cols.insert(0, value_field_name) 250 | if pop_field != "": 251 | cols.insert(0, pop_field) 252 | 253 | # create the geo-detector raw data 254 | data_gen = ([f[col] for col in cols] for f in features) 255 | df = pd.DataFrame.from_records(data=data_gen, columns=cols) 256 | 257 | # checking the input data 258 | y = df[value_field_name] 259 | if not pd.api.types.is_numeric_dtype(y): 260 | raise TypeError('Study variable is not of a numeric type') 261 | if pop_field != "": 262 | pop_data = df[pop_field] 263 | if not pd.api.types.is_numeric_dtype(pop_data): 264 | raise TypeError('Equality variable is not of a numeric type') 265 | pop_data = pop_data.to_numpy() 266 | else: 267 | pop_data = None 268 | 269 | # stratification 270 | if len(numerical_field_names) > 0: 271 | for x in numerical_field_names: 272 | if not pd.api.types.is_numeric_dtype(df[x]): 273 | feedback.reportError( 274 | self.tr('\nERROR: Field ' + x + 'is not of a numeric type. 
We cannot continue...\n')) 275 | raise TypeError('Field ' + x + 'is not of a numeric type') 276 | else: 277 | xdata = df[x] 278 | gd_x = op_st.optimal_geo_detector(x=xdata.to_numpy(), y=y.to_numpy(), min_group=min_group, 279 | min_samples_group=min_sample, 280 | max_group=max_group, equal_data=pop_data, 281 | equ_threshold=pop_threshold, 282 | cv_seed=cv_random_seed, cv_fold=cv_fold, cv_times=cv_rep, 283 | max_sample=stra_samp, 284 | min_delta_q=inc_q) 285 | x_group = gd_x.group_interval 286 | cat_x_name = 'Cat_' + x 287 | df[cat_x_name] = x_group 288 | category_field_names.append(cat_x_name) 289 | 290 | row_shape = df.shape[0] 291 | for cate_name in category_field_names: 292 | uniq_val, freq_val = np.unique(df[cate_name], return_counts=True) 293 | # if the unique value is more than 1/3 length of df 294 | if len(freq_val) > (row_shape / 3): 295 | feedback.reportError(self.tr('\nERROR: There are too many groups in ' + cate_name + 296 | '. We cannot continue...\n')) 297 | raise TypeError('There are too many groups in ' + cate_name + '.') 298 | if any(freq_val == 1): 299 | feedback.pushWarning(self.tr('\nWarnings: The group ' + str(uniq_val[freq_val == 1][0]) + ' of ' + 300 | cate_name + ' has only one element\n')) 301 | # import pydevd_pycharm 302 | # pydevd_pycharm.settrace('localhost', port=10112, stdoutToServer=True, stderrToServer=True) 303 | gd_result = geodetector.GeoDetector(df, category_field_names, value_field_name) 304 | 305 | if output_res.split('.')[-1] == 'xls': 306 | gd_result.save_to_xls(output_res) 307 | 308 | elif output_res.split('.')[-1] == 'txt': 309 | gd_result.print_result(output_res) 310 | 311 | return {self.OUTPUT: output_res} 312 | 313 | def name(self): 314 | """ 315 | Returns the algorithm name, used for identifying the algorithm. This 316 | string should be fixed for the algorithm, and must not be localised. 317 | The name should be unique within each provider. 
Names should contain 318 | lowercase alphanumeric characters only and no spaces or other 319 | formatting characters. 320 | """ 321 | return 'Q_GD' 322 | 323 | def displayName(self): 324 | """ 325 | Returns the translated algorithm name, which should be used for any 326 | user-visible display of the algorithm name. 327 | """ 328 | return self.tr(self.name()) 329 | 330 | def group(self): 331 | """ 332 | Returns the name of the group this algorithm belongs to. This string 333 | should be localised. 334 | """ 335 | return self.tr(self.groupId()) 336 | 337 | def groupId(self): 338 | """ 339 | Returns the unique ID of the group this algorithm belongs to. This 340 | string should be fixed for the algorithm, and must not be localised. 341 | The group id should be unique within each provider. Group id should 342 | contain lowercase alphanumeric characters only and no spaces or other 343 | formatting characters. 344 | """ 345 | return '' 346 | 347 | def tr(self, string): 348 | return QCoreApplication.translate('Processing', string) 349 | 350 | def createInstance(self): 351 | return Geo_detectorAlgorithm() 352 | -------------------------------------------------------------------------------- /geographical_detector_provider.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | /*************************************************************************** 5 | Geo_detector 6 | A QGIS plugin 7 | This plugin adds an algorithm to measure the spatial stratified heter 8 | Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ 9 | ------------------- 10 | begin : 2021-12-21 11 | copyright : (C) 2021 by Guojg 12 | email : guojg@lreis.ac.cn 13 | ***************************************************************************/ 14 | 15 | /*************************************************************************** 16 | * * 17 | * This program is free software; you can redistribute it and/or modify * 
18 | * it under the terms of the GNU General Public License as published by * 19 | * the Free Software Foundation; either version 2 of the License, or * 20 | * (at your option) any later version. * 21 | * * 22 | ***************************************************************************/ 23 | """ 24 | 25 | __author__ = 'Guojg' 26 | __date__ = '2021-12-21' 27 | __copyright__ = '(C) 2021 by Guojg' 28 | 29 | # This will get replaced with a git SHA1 when you do a git archive 30 | 31 | __revision__ = '$Format:%H$' 32 | 33 | import os 34 | import inspect 35 | from qgis.PyQt.QtGui import QIcon 36 | 37 | from qgis.core import QgsProcessingProvider 38 | 39 | from .geographical_detector_algorithm import Geo_detectorAlgorithm 40 | 41 | 42 | class Geo_detectorProvider(QgsProcessingProvider): 43 | 44 | def __init__(self): 45 | """ 46 | Default constructor. 47 | """ 48 | QgsProcessingProvider.__init__(self) 49 | 50 | def unload(self): 51 | """ 52 | Unloads the provider. Any tear-down steps required by the provider 53 | should be implemented here. 54 | """ 55 | pass 56 | 57 | def loadAlgorithms(self): 58 | """ 59 | Loads all algorithms belonging to this provider. 60 | """ 61 | self.addAlgorithm(Geo_detectorAlgorithm()) 62 | # self.addAlgorithm(Stratification_gd()) 63 | # self.addAlgorithm(RandomPointsPolygons()) 64 | # Stratification_gd 65 | 66 | def id(self): 67 | """ 68 | Returns the unique provider id, used for identifying the provider. This 69 | string should be a unique, short, character only string, eg "qgis" or 70 | "gdal". This string should not be localised. 71 | """ 72 | return 'Geographical detector' 73 | 74 | def name(self): 75 | """ 76 | Returns the provider name, which is used to describe the provider 77 | within the GUI. 78 | 79 | This string should be short (e.g. "Lastools") and localised. 
80 | """ 81 | return self.tr('Geographical detector') 82 | 83 | def icon(self): 84 | """ 85 | Should return a QIcon which is used for your provider inside 86 | the Processing toolbox. 87 | """ 88 | # return QgsProcessingProvider.icon(self) default icon 89 | cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0] 90 | icon = QIcon(os.path.join(os.path.join(cmd_folder, 'icon.png'))) 91 | return icon 92 | 93 | def longName(self): 94 | """ 95 | Returns the a longer version of the provider name, which can include 96 | extra details such as version numbers. E.g. "Lastools LIDAR tools 97 | (version 2.2.1)". This string should be localised. The default 98 | implementation returns the same string as name(). 99 | """ 100 | return self.name() 101 | -------------------------------------------------------------------------------- /help/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # Internal variables. 
11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " singlehtml to make a single large HTML file" 22 | @echo " pickle to make pickle files" 23 | @echo " json to make JSON files" 24 | @echo " htmlhelp to make HTML files and a HTML help project" 25 | @echo " qthelp to make HTML files and a qthelp project" 26 | @echo " devhelp to make HTML files and a Devhelp project" 27 | @echo " epub to make an epub" 28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 29 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 30 | @echo " text to make text files" 31 | @echo " man to make manual pages" 32 | @echo " changes to make an overview of all changed/added/deprecated items" 33 | @echo " linkcheck to check all external links for integrity" 34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 35 | 36 | clean: 37 | -rm -rf $(BUILDDIR)/* 38 | 39 | html: 40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 41 | @echo 42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 43 | 44 | dirhtml: 45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 48 | 49 | singlehtml: 50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 51 | @echo 52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
53 | 54 | pickle: 55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 56 | @echo 57 | @echo "Build finished; now you can process the pickle files." 58 | 59 | json: 60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 61 | @echo 62 | @echo "Build finished; now you can process the JSON files." 63 | 64 | htmlhelp: 65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 66 | @echo 67 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 68 | ".hhp project file in $(BUILDDIR)/htmlhelp." 69 | 70 | qthelp: 71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 72 | @echo 73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/template_class.qhcp" 76 | @echo "To view the help file:" 77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/template_class.qhc" 78 | 79 | devhelp: 80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 81 | @echo 82 | @echo "Build finished." 83 | @echo "To view the help file:" 84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/template_class" 85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/template_class" 86 | @echo "# devhelp" 87 | 88 | epub: 89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 90 | @echo 91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 92 | 93 | latex: 94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 95 | @echo 96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 98 | "(use \`make latexpdf' here to do that automatically)." 99 | 100 | latexpdf: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo "Running LaTeX files through pdflatex..." 
103 | make -C $(BUILDDIR)/latex all-pdf 104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 105 | 106 | text: 107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 108 | @echo 109 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 110 | 111 | man: 112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 113 | @echo 114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 115 | 116 | changes: 117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 118 | @echo 119 | @echo "The overview file is in $(BUILDDIR)/changes." 120 | 121 | linkcheck: 122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 123 | @echo 124 | @echo "Link check complete; look for any errors in the above output " \ 125 | "or in $(BUILDDIR)/linkcheck/output.txt." 126 | 127 | doctest: 128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 129 | @echo "Testing of doctests in the sources finished, look at the " \ 130 | "results in $(BUILDDIR)/doctest/output.txt." 131 | -------------------------------------------------------------------------------- /help/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "help" ( 17 | :help 18 | echo.Please use `make ^` where ^ is one of 19 | echo. html to make standalone HTML files 20 | echo. dirhtml to make HTML files named index.html in directories 21 | echo. singlehtml to make a single large HTML file 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. 
htmlhelp to make HTML files and a HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. devhelp to make HTML files and a Devhelp project 27 | echo. epub to make an epub 28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 29 | echo. text to make text files 30 | echo. man to make manual pages 31 | echo. changes to make an overview over all changed/added/deprecated items 32 | echo. linkcheck to check all external links for integrity 33 | echo. doctest to run all doctests embedded in the documentation if enabled 34 | goto end 35 | ) 36 | 37 | if "%1" == "clean" ( 38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 39 | del /q /s %BUILDDIR%\* 40 | goto end 41 | ) 42 | 43 | if "%1" == "html" ( 44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 45 | echo. 46 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 47 | goto end 48 | ) 49 | 50 | if "%1" == "dirhtml" ( 51 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 52 | echo. 53 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 54 | goto end 55 | ) 56 | 57 | if "%1" == "singlehtml" ( 58 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 59 | echo. 60 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 61 | goto end 62 | ) 63 | 64 | if "%1" == "pickle" ( 65 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 66 | echo. 67 | echo.Build finished; now you can process the pickle files. 68 | goto end 69 | ) 70 | 71 | if "%1" == "json" ( 72 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 73 | echo. 74 | echo.Build finished; now you can process the JSON files. 75 | goto end 76 | ) 77 | 78 | if "%1" == "htmlhelp" ( 79 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 80 | echo. 81 | echo.Build finished; now you can run HTML Help Workshop with the ^ 82 | .hhp project file in %BUILDDIR%/htmlhelp. 
83 | goto end 84 | ) 85 | 86 | if "%1" == "qthelp" ( 87 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 88 | echo. 89 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 90 | .qhcp project file in %BUILDDIR%/qthelp, like this: 91 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\template_class.qhcp 92 | echo.To view the help file: 93 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\template_class.ghc 94 | goto end 95 | ) 96 | 97 | if "%1" == "devhelp" ( 98 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 99 | echo. 100 | echo.Build finished. 101 | goto end 102 | ) 103 | 104 | if "%1" == "epub" ( 105 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 106 | echo. 107 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 108 | goto end 109 | ) 110 | 111 | if "%1" == "latex" ( 112 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 113 | echo. 114 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 115 | goto end 116 | ) 117 | 118 | if "%1" == "text" ( 119 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 120 | echo. 121 | echo.Build finished. The text files are in %BUILDDIR%/text. 122 | goto end 123 | ) 124 | 125 | if "%1" == "man" ( 126 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 127 | echo. 128 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 129 | goto end 130 | ) 131 | 132 | if "%1" == "changes" ( 133 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 134 | echo. 135 | echo.The overview file is in %BUILDDIR%/changes. 136 | goto end 137 | ) 138 | 139 | if "%1" == "linkcheck" ( 140 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 141 | echo. 142 | echo.Link check complete; look for any errors in the above output ^ 143 | or in %BUILDDIR%/linkcheck/output.txt. 144 | goto end 145 | ) 146 | 147 | if "%1" == "doctest" ( 148 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 149 | echo. 
150 | echo.Testing of doctests in the sources finished, look at the ^ 151 | results in %BUILDDIR%/doctest/output.txt. 152 | goto end 153 | ) 154 | 155 | :end 156 | -------------------------------------------------------------------------------- /help/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Geo_detector documentation build configuration file, created by 4 | # sphinx-quickstart on Sun Feb 12 17:11:03 2012. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys, os 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | #sys.path.insert(0, os.path.abspath('.')) 20 | 21 | # -- General configuration ----------------------------------------------------- 22 | 23 | # If your documentation needs a minimal Sphinx version, state it here. 24 | #needs_sphinx = '1.0' 25 | 26 | # Add any Sphinx extension module names here, as strings. They can be extensions 27 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 28 | extensions = ['sphinx.ext.todo', 'sphinx.ext.imgmath', 'sphinx.ext.viewcode'] 29 | 30 | # Add any paths that contain templates here, relative to this directory. 31 | templates_path = ['_templates'] 32 | 33 | # The suffix of source filenames. 34 | source_suffix = '.rst' 35 | 36 | # The encoding of source files. 37 | #source_encoding = 'utf-8-sig' 38 | 39 | # The master toctree document. 40 | master_doc = 'index' 41 | 42 | # General information about the project. 
43 | project = u'Geo_detector' 44 | copyright = u'2013, Guojg' 45 | 46 | # The version info for the project you're documenting, acts as replacement for 47 | # |version| and |release|, also used in various other places throughout the 48 | # built documents. 49 | # 50 | # The short X.Y version. 51 | version = '0.1' 52 | # The full version, including alpha/beta/rc tags. 53 | release = '0.1' 54 | 55 | # The language for content autogenerated by Sphinx. Refer to documentation 56 | # for a list of supported languages. 57 | #language = None 58 | 59 | # There are two options for replacing |today|: either, you set today to some 60 | # non-false value, then it is used: 61 | #today = '' 62 | # Else, today_fmt is used as the format for a strftime call. 63 | #today_fmt = '%B %d, %Y' 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | exclude_patterns = [] 68 | 69 | # The reST default role (used for this markup: `text`) to use for all documents. 70 | #default_role = None 71 | 72 | # If true, '()' will be appended to :func: etc. cross-reference text. 73 | #add_function_parentheses = True 74 | 75 | # If true, the current module name will be prepended to all description 76 | # unit titles (such as .. function::). 77 | #add_TemplateModuleNames = True 78 | 79 | # If true, sectionauthor and moduleauthor directives will be shown in the 80 | # output. They are ignored by default. 81 | #show_authors = False 82 | 83 | # The name of the Pygments (syntax highlighting) style to use. 84 | pygments_style = 'sphinx' 85 | 86 | # A list of ignored prefixes for module index sorting. 87 | #modindex_common_prefix = [] 88 | 89 | 90 | # -- Options for HTML output --------------------------------------------------- 91 | 92 | # The theme to use for HTML and HTML Help pages. See the documentation for 93 | # a list of builtin themes. 
94 | html_theme = 'default' 95 | 96 | # Theme options are theme-specific and customize the look and feel of a theme 97 | # further. For a list of options available for each theme, see the 98 | # documentation. 99 | #html_theme_options = {} 100 | 101 | # Add any paths that contain custom themes here, relative to this directory. 102 | #html_theme_path = [] 103 | 104 | # The name for this set of Sphinx documents. If None, it defaults to 105 | # " v documentation". 106 | #html_title = None 107 | 108 | # A shorter title for the navigation bar. Default is the same as html_title. 109 | #html_short_title = None 110 | 111 | # The name of an image file (relative to this directory) to place at the top 112 | # of the sidebar. 113 | #html_logo = None 114 | 115 | # The name of an image file (within the static path) to use as favicon of the 116 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 117 | # pixels large. 118 | #html_favicon = None 119 | 120 | # Add any paths that contain custom static files (such as style sheets) here, 121 | # relative to this directory. They are copied after the builtin static files, 122 | # so a file named "default.css" will overwrite the builtin "default.css". 123 | html_static_path = ['_static'] 124 | 125 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 126 | # using the given strftime format. 127 | #html_last_updated_fmt = '%b %d, %Y' 128 | 129 | # If true, SmartyPants will be used to convert quotes and dashes to 130 | # typographically correct entities. 131 | #html_use_smartypants = True 132 | 133 | # Custom sidebar templates, maps document names to template names. 134 | #html_sidebars = {} 135 | 136 | # Additional templates that should be rendered to pages, maps page names to 137 | # template names. 138 | #html_additional_pages = {} 139 | 140 | # If false, no module index is generated. 141 | #html_domain_indices = True 142 | 143 | # If false, no index is generated. 
144 | #html_use_index = True 145 | 146 | # If true, the index is split into individual pages for each letter. 147 | #html_split_index = False 148 | 149 | # If true, links to the reST sources are added to the pages. 150 | #html_show_sourcelink = True 151 | 152 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 153 | #html_show_sphinx = True 154 | 155 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 156 | #html_show_copyright = True 157 | 158 | # If true, an OpenSearch description file will be output, and all pages will 159 | # contain a tag referring to it. The value of this option must be the 160 | # base URL from which the finished HTML is served. 161 | #html_use_opensearch = '' 162 | 163 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 164 | #html_file_suffix = None 165 | 166 | # Output file base name for HTML help builder. 167 | htmlhelp_basename = 'TemplateClassdoc' 168 | 169 | 170 | # -- Options for LaTeX output -------------------------------------------------- 171 | 172 | # The paper size ('letter' or 'a4'). 173 | #latex_paper_size = 'letter' 174 | 175 | # The font size ('10pt', '11pt' or '12pt'). 176 | #latex_font_size = '10pt' 177 | 178 | # Grouping the document tree into LaTeX files. List of tuples 179 | # (source start file, target name, title, author, documentclass [howto/manual]). 180 | latex_documents = [ 181 | ('index', 'Geo_detector.tex', u'Geo_detector Documentation', 182 | u'Guojg', 'manual'), 183 | ] 184 | 185 | # The name of an image file (relative to this directory) to place at the top of 186 | # the title page. 187 | #latex_logo = None 188 | 189 | # For "manual" documents, if this is true, then toplevel headings are parts, 190 | # not chapters. 191 | #latex_use_parts = False 192 | 193 | # If true, show page references after internal links. 194 | #latex_show_pagerefs = False 195 | 196 | # If true, show URL addresses after external links. 
197 | #latex_show_urls = False 198 | 199 | # Additional stuff for the LaTeX preamble. 200 | #latex_preamble = '' 201 | 202 | # Documents to append as an appendix to all manuals. 203 | #latex_appendices = [] 204 | 205 | # If false, no module index is generated. 206 | #latex_domain_indices = True 207 | 208 | 209 | # -- Options for manual page output -------------------------------------------- 210 | 211 | # One entry per manual page. List of tuples 212 | # (source start file, name, description, authors, manual section). 213 | man_pages = [ 214 | ('index', 'TemplateClass', u'Geo_detector Documentation', 215 | [u'Guojg'], 1) 216 | ] 217 | -------------------------------------------------------------------------------- /help/source/index.rst: -------------------------------------------------------------------------------- 1 | .. Geo_detector documentation master file, created by 2 | sphinx-quickstart on Sun Feb 12 17:11:03 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Geo_detector's documentation! 7 | ============================================ 8 | 9 | Contents: 10 | 11 | .. 
toctree:: 12 | :maxdepth: 2 13 | 14 | Indices and tables 15 | ================== 16 | 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | 21 | -------------------------------------------------------------------------------- /i18n/af.ts: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | @default 5 | 6 | 7 | Good morning 8 | Goeie more 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsnrguo/QGIS-Geographical-detector/a13a14be3ca952ed9ae170d08d486e5217f6a591/icon.png -------------------------------------------------------------------------------- /image/Q_GD GUI.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsnrguo/QGIS-Geographical-detector/a13a14be3ca952ed9ae170d08d486e5217f6a591/image/Q_GD GUI.png -------------------------------------------------------------------------------- /image/Snipaste_2022-04-01_12-18-20.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsnrguo/QGIS-Geographical-detector/a13a14be3ca952ed9ae170d08d486e5217f6a591/image/Snipaste_2022-04-01_12-18-20.png -------------------------------------------------------------------------------- /metadata.txt: -------------------------------------------------------------------------------- 1 | # This file contains metadata for your plugin. 
2 | 3 | # This file should be included when you package your plugin.# Mandatory items: 4 | 5 | [general] 6 | name=Geographical detector 7 | qgisMinimumVersion=3.0 8 | description=A tool for measuring spatial stratified heterogeneity and spatial associations of geographical attributes 9 | version=1.3.0 10 | author=Jiangang Guo; Jinfeng Wang 11 | email=guojg@lreis.ac.cn 12 | 13 | about=Spatial Stratified Heterogeneity (SSH) refers to the phenomenon that the within strata are more similar than the between strata. Examples are land-use types and climate zones in spatial data. Geographical detector is a statistical tool to measure SSH and to make attribution for/by SSH; (1) measure and find SSH among data; (2) test the coupling between two variables Y and X and (3) investigate interaction between two explanatory variables X1 and X2 to a response variable Y. 14 | 15 | tracker=https://github.com/GuoXiaoyin/QGIS-Geographical-detector/issues 16 | repository=https://github.com/GuoXiaoyin/QGIS-Geographical-detector 17 | # End of mandatory metadata 18 | 19 | # Recommended items: 20 | 21 | hasProcessingProvider=yes 22 | # Uncomment the following line and add your changelog: 23 | # changelog= 24 | 25 | # Tags are comma separated with spaces allowed 26 | tags=python, spatial analysis, stratification, spatial heterogeneity, geographical detector 27 | 28 | homepage=https://plugins.qgis.org/plugins/geographical-detector/ 29 | category=Analysis 30 | icon=icon.png 31 | # experimental flag 32 | experimental=False 33 | 34 | # deprecated flag (applies to the whole plugin, not just a single version) 35 | deprecated=False 36 | 37 | # Since QGIS 3.8, a comma separated list of plugins to be installed 38 | # (or upgraded) can be specified. 39 | # Check the documentation for more information. 40 | # plugin_dependencies= 41 | 42 | # Category of the plugin: Raster, Vector, Database or Web 43 | # category=Vector 44 | 45 | # If the plugin can run on QGIS Server.
46 | server=False 47 | 48 | -------------------------------------------------------------------------------- /pb_tool.cfg: -------------------------------------------------------------------------------- 1 | #/*************************************************************************** 2 | # Geo_detector 3 | # 4 | # Configuration file for plugin builder tool (pb_tool) 5 | # Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ 6 | # ------------------- 7 | # begin : 2021-12-21 8 | # copyright : (C) 2021 by Guojg 9 | # email : guojg@lreis.ac.cn 10 | # ***************************************************************************/ 11 | # 12 | #/*************************************************************************** 13 | # * * 14 | # * This program is free software; you can redistribute it and/or modify * 15 | # * it under the terms of the GNU General Public License as published by * 16 | # * the Free Software Foundation; either version 2 of the License, or * 17 | # * (at your option) any later version. * 18 | # * * 19 | # ***************************************************************************/ 20 | # 21 | # 22 | # You can install pb_tool using: 23 | # pip install http://geoapt.net/files/pb_tool.zip 24 | # 25 | # Consider doing your development (and install of pb_tool) in a virtualenv. 26 | # 27 | # For details on setting up and using pb_tool, see: 28 | # http://g-sherman.github.io/plugin_build_tool/ 29 | # 30 | # Issues and pull requests here: 31 | # https://github.com/g-sherman/plugin_build_tool: 32 | # 33 | # Sane defaults for your plugin generated by the Plugin Builder are 34 | # already set below. 35 | # 36 | # As you add Python source files and UI files to your plugin, add 37 | # them to the appropriate [files] section below. 38 | 39 | [plugin] 40 | # Name of the plugin. 
This is the name of the directory that will 41 | # be created in .qgis2/python/plugins 42 | name: geographical_detector 43 | 44 | # Full path to where you want your plugin directory copied. If empty, 45 | # the QGIS default path will be used. Don't include the plugin name in 46 | # the path. 47 | plugin_path: 48 | 49 | [files] 50 | # Python files that should be deployed with the plugin 51 | python_files: __init__.py geographical_detector.py 52 | 53 | # The main dialog file that is loaded (not compiled) 54 | main_dialog: 55 | 56 | # Other ui files for dialogs you create (these will be compiled) 57 | compiled_ui_files: 58 | 59 | # Resource file(s) that will be compiled 60 | resource_files: 61 | 62 | # Other files required for the plugin 63 | extras: metadata.txt 64 | 65 | # Other directories to be deployed with the plugin. 66 | # These must be subdirectories under the plugin directory 67 | extra_dirs: 68 | 69 | # ISO code(s) for any locales (translations), separated by spaces. 70 | # Corresponding .ts files must exist in the i18n directory 71 | locales: 72 | 73 | [help] 74 | # the built help directory that should be deployed with the plugin 75 | dir: help/build/html 76 | # the name of the directory to target in the deployed plugin 77 | target: help 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /plugin_upload.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | """This script uploads a plugin package to the plugin repository. 4 | Authors: A. Pasotti, V. 
"""This script uploads a plugin package to the QGIS plugin repository.

Authors: A. Pasotti, V. Picavet
git sha : $TemplateVCSFormat
"""

import sys
import getpass
import xmlrpc.client
from optparse import OptionParser

# NOTE(review): the original called standard_library.install_aliases() here,
# but `standard_library` (from the python-future package) was never imported,
# so importing this module raised NameError. The shim is unnecessary on
# Python 3 (xmlrpc.client is already used directly) and has been removed.

# Configuration
PROTOCOL = 'https'
SERVER = 'plugins.qgis.org'
PORT = '443'
ENDPOINT = '/plugins/RPC2/'
VERBOSE = False


def main(parameters, arguments):
    """Main entry point.

    Uploads the plugin zip named in ``arguments[0]`` to the plugin
    repository via XML-RPC, printing the resulting plugin/version ids or a
    description of any protocol/RPC fault.

    :param parameters: Command line parameters.
    :param arguments: Command line arguments.
    """
    address = "{protocol}://{username}:{password}@{server}:{port}{endpoint}".format(
        protocol=PROTOCOL,
        username=parameters.username,
        password=parameters.password,
        server=parameters.server,
        port=parameters.port,
        endpoint=ENDPOINT)
    # Never echo the real password to the console.
    print("Connecting to: %s" % hide_password(address))

    server = xmlrpc.client.ServerProxy(address, verbose=VERBOSE)

    try:
        with open(arguments[0], 'rb') as handle:
            plugin_id, version_id = server.plugin.upload(
                xmlrpc.client.Binary(handle.read()))
        print("Plugin ID: %s" % plugin_id)
        print("Version ID: %s" % version_id)
    except xmlrpc.client.ProtocolError as err:
        print("A protocol error occurred")
        print("URL: %s" % hide_password(err.url, 0))
        print("HTTP/HTTPS headers: %s" % err.headers)
        print("Error code: %d" % err.errcode)
        print("Error message: %s" % err.errmsg)
    except xmlrpc.client.Fault as err:
        print("A fault occurred")
        print("Fault code: %d" % err.faultCode)
        print("Fault string: %s" % err.faultString)


def hide_password(url, start=6):
    """Returns the http url with password part replaced with '*'.

    If the URL carries no credentials (no ``@`` separator), it is returned
    unchanged — the original implementation produced a garbled slice in
    that case because ``str.find`` returned ``-1``.

    :param url: URL to upload the plugin to.
    :type url: str

    :param start: Position of start of password.
    :type start: int
    """
    end_position = url.find('@')
    if end_position < 0:
        # No user:password@ component present; nothing to hide.
        return url
    start_position = url.find(':', start) + 1
    return "%s%s%s" % (
        url[:start_position],
        '*' * (end_position - start_position),
        url[end_position:])


if __name__ == "__main__":
    parser = OptionParser(usage="%prog [options] plugin.zip")
    parser.add_option(
        "-w", "--password", dest="password",
        help="Password for plugin site", metavar="******")
    parser.add_option(
        "-u", "--username", dest="username",
        help="Username of plugin site", metavar="user")
    parser.add_option(
        "-p", "--port", dest="port",
        help="Server port to connect to", metavar="80")
    parser.add_option(
        "-s", "--server", dest="server",
        help="Specify server name", metavar="plugins.qgis.org")
    options, args = parser.parse_args()
    if len(args) != 1:
        print("Please specify zip file.\n")
        parser.print_help()
        sys.exit(1)
    if not options.server:
        options.server = SERVER
    if not options.port:
        options.port = PORT
    if not options.username:
        # interactive mode: default to the OS login name
        username = getpass.getuser()
        print("Please enter user name [%s] :" % username, end=' ')

        res = input()
        if res != "":
            options.username = res
        else:
            options.username = username
    if not options.password:
        # interactive mode: prompt without echoing
        options.password = getpass.getpass()
    main(options, args)
15 | ignore=CVS 16 | 17 | # Pickle collected data for later comparisons. 18 | persistent=yes 19 | 20 | # List of plugins (as comma separated values of python modules names) to load, 21 | # usually to register additional checkers. 22 | load-plugins= 23 | 24 | 25 | [MESSAGES CONTROL] 26 | 27 | # Enable the message, report, category or checker with the given id(s). You can 28 | # either give multiple identifier separated by comma (,) or put this option 29 | # multiple time. See also the "--disable" option for examples. 30 | #enable= 31 | 32 | # Disable the message, report, category or checker with the given id(s). You 33 | # can either give multiple identifiers separated by comma (,) or put this 34 | # option multiple times (only on the command line, not in the configuration 35 | # file where it should appear only once).You can also use "--disable=all" to 36 | # disable everything first and then reenable specific checks. For example, if 37 | # you want to run only the similarities checker, you can use "--disable=all 38 | # --enable=similarities". If you want to run only the classes checker, but have 39 | # no Warning level messages displayed, use"--disable=all --enable=classes 40 | # --disable=W" 41 | # see http://stackoverflow.com/questions/21487025/pylint-locally-defined-disables-still-give-warnings-how-to-suppress-them 42 | disable=locally-disabled,C0103 43 | 44 | 45 | [REPORTS] 46 | 47 | # Set the output format. Available formats are text, parseable, colorized, msvs 48 | # (visual studio) and html. You can also give a reporter class, eg 49 | # mypackage.mymodule.MyReporterClass. 50 | output-format=text 51 | 52 | # Put messages in a separate file for each module / package specified on the 53 | # command line instead of printing them on stdout. Reports (if any) will be 54 | # written in a file name "pylint_global.[txt|html]". 
55 | files-output=no 56 | 57 | # Tells whether to display a full report or only the messages 58 | reports=yes 59 | 60 | # Python expression which should return a note less than 10 (10 is the highest 61 | # note). You have access to the variables errors warning, statement which 62 | # respectively contain the number of errors / warnings messages and the total 63 | # number of statements analyzed. This is used by the global evaluation report 64 | # (RP0004). 65 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 66 | 67 | # Add a comment according to your evaluation note. This is used by the global 68 | # evaluation report (RP0004). 69 | comment=no 70 | 71 | # Template used to display messages. This is a python new-style format string 72 | # used to format the message information. See doc for all details 73 | #msg-template= 74 | 75 | 76 | [BASIC] 77 | 78 | # Required attributes for module, separated by a comma 79 | required-attributes= 80 | 81 | # List of builtins function names that should not be used, separated by a comma 82 | bad-functions=map,filter,apply,input 83 | 84 | # Regular expression which should only match correct module names 85 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 86 | 87 | # Regular expression which should only match correct module level names 88 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 89 | 90 | # Regular expression which should only match correct class names 91 | class-rgx=[A-Z_][a-zA-Z0-9]+$ 92 | 93 | # Regular expression which should only match correct function names 94 | function-rgx=[a-z_][a-z0-9_]{2,30}$ 95 | 96 | # Regular expression which should only match correct method names 97 | method-rgx=[a-z_][a-z0-9_]{2,30}$ 98 | 99 | # Regular expression which should only match correct instance attribute names 100 | attr-rgx=[a-z_][a-z0-9_]{2,30}$ 101 | 102 | # Regular expression which should only match correct argument names 103 | argument-rgx=[a-z_][a-z0-9_]{2,30}$ 104 | 105 | # Regular 
expression which should only match correct variable names 106 | variable-rgx=[a-z_][a-z0-9_]{2,30}$ 107 | 108 | # Regular expression which should only match correct attribute names in class 109 | # bodies 110 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ 111 | 112 | # Regular expression which should only match correct list comprehension / 113 | # generator expression variable names 114 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ 115 | 116 | # Good variable names which should always be accepted, separated by a comma 117 | good-names=i,j,k,ex,Run,_ 118 | 119 | # Bad variable names which should always be refused, separated by a comma 120 | bad-names=foo,bar,baz,toto,tutu,tata 121 | 122 | # Regular expression which should only match function or class names that do 123 | # not require a docstring. 124 | no-docstring-rgx=__.*__ 125 | 126 | # Minimum line length for functions/classes that require docstrings, shorter 127 | # ones are exempt. 128 | docstring-min-length=-1 129 | 130 | 131 | [MISCELLANEOUS] 132 | 133 | # List of note tags to take in consideration, separated by a comma. 134 | notes=FIXME,XXX,TODO 135 | 136 | 137 | [TYPECHECK] 138 | 139 | # Tells whether missing members accessed in mixin class should be ignored. A 140 | # mixin class is detected if its name ends with "mixin" (case insensitive). 141 | ignore-mixin-members=yes 142 | 143 | # List of classes names for which member attributes should not be checked 144 | # (useful for classes with attributes dynamically set). 145 | ignored-classes=SQLObject 146 | 147 | # When zope mode is activated, add a predefined set of Zope acquired attributes 148 | # to generated-members. 149 | zope=no 150 | 151 | # List of members which are set dynamically and missed by pylint inference 152 | # system, and so shouldn't trigger E0201 when accessed. Python regular 153 | # expressions are accepted. 
154 | generated-members=REQUEST,acl_users,aq_parent 155 | 156 | 157 | [VARIABLES] 158 | 159 | # Tells whether we should check for unused import in __init__ files. 160 | init-import=no 161 | 162 | # A regular expression matching the beginning of the name of dummy variables 163 | # (i.e. not used). 164 | dummy-variables-rgx=_$|dummy 165 | 166 | # List of additional names supposed to be defined in builtins. Remember that 167 | # you should avoid to define new builtins when possible. 168 | additional-builtins= 169 | 170 | 171 | [FORMAT] 172 | 173 | # Maximum number of characters on a single line. 174 | max-line-length=80 175 | 176 | # Regexp for a line that is allowed to be longer than the limit. 177 | ignore-long-lines=^\s*(# )??$ 178 | 179 | # Allow the body of an if to be on the same line as the test if there is no 180 | # else. 181 | single-line-if-stmt=no 182 | 183 | # List of optional constructs for which whitespace checking is disabled 184 | no-space-check=trailing-comma,dict-separator 185 | 186 | # Maximum number of lines in a module 187 | max-module-lines=1000 188 | 189 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 190 | # tab). 191 | indent-string=' ' 192 | 193 | 194 | [SIMILARITIES] 195 | 196 | # Minimum lines number of a similarity. 197 | min-similarity-lines=4 198 | 199 | # Ignore comments when computing similarities. 200 | ignore-comments=yes 201 | 202 | # Ignore docstrings when computing similarities. 203 | ignore-docstrings=yes 204 | 205 | # Ignore imports when computing similarities. 206 | ignore-imports=no 207 | 208 | 209 | [IMPORTS] 210 | 211 | # Deprecated modules which should not be used, separated by a comma 212 | deprecated-modules=regsub,TERMIOS,Bastion,rexec 213 | 214 | # Create a graph of every (i.e. 
internal and external) dependencies in the 215 | # given file (report RP0402 must not be disabled) 216 | import-graph= 217 | 218 | # Create a graph of external dependencies in the given file (report RP0402 must 219 | # not be disabled) 220 | ext-import-graph= 221 | 222 | # Create a graph of internal dependencies in the given file (report RP0402 must 223 | # not be disabled) 224 | int-import-graph= 225 | 226 | 227 | [DESIGN] 228 | 229 | # Maximum number of arguments for function / method 230 | max-args=5 231 | 232 | # Argument names that match this expression will be ignored. Default to name 233 | # with leading underscore 234 | ignored-argument-names=_.* 235 | 236 | # Maximum number of locals for function / method body 237 | max-locals=15 238 | 239 | # Maximum number of return / yield for function / method body 240 | max-returns=6 241 | 242 | # Maximum number of branch for function / method body 243 | max-branches=12 244 | 245 | # Maximum number of statements in function / method body 246 | max-statements=50 247 | 248 | # Maximum number of parents for a class (see R0901). 249 | max-parents=7 250 | 251 | # Maximum number of attributes for a class (see R0902). 252 | max-attributes=7 253 | 254 | # Minimum number of public methods for a class (see R0903). 255 | min-public-methods=2 256 | 257 | # Maximum number of public methods for a class (see R0904). 258 | max-public-methods=20 259 | 260 | 261 | [CLASSES] 262 | 263 | # List of interface methods to ignore, separated by a comma. This is used for 264 | # instance to not check methods defines in Zope's Interface base class. 265 | ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by 266 | 267 | # List of method names used to declare (i.e. assign) instance attributes. 
268 | defining-attr-methods=__init__,__new__,setUp 269 | 270 | # List of valid names for the first argument in a class method. 271 | valid-classmethod-first-arg=cls 272 | 273 | # List of valid names for the first argument in a metaclass class method. 274 | valid-metaclass-classmethod-first-arg=mcs 275 | 276 | 277 | [EXCEPTIONS] 278 | 279 | # Exceptions that will emit a warning when being caught. Defaults to 280 | # "Exception" 281 | overgeneral-exceptions=Exception 282 | -------------------------------------------------------------------------------- /scripts/compile-strings.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | LRELEASE=$1 3 | LOCALES=$2 4 | 5 | 6 | for LOCALE in ${LOCALES} 7 | do 8 | echo "Processing: ${LOCALE}.ts" 9 | # Note we don't use pylupdate with qt .pro file approach as it is flakey 10 | # about what is made available. 11 | $LRELEASE i18n/${LOCALE}.ts 12 | done 13 | -------------------------------------------------------------------------------- /scripts/run-env-linux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | QGIS_PREFIX_PATH=/usr/local/qgis-2.0 4 | if [ -n "$1" ]; then 5 | QGIS_PREFIX_PATH=$1 6 | fi 7 | 8 | echo ${QGIS_PREFIX_PATH} 9 | 10 | 11 | export QGIS_PREFIX_PATH=${QGIS_PREFIX_PATH} 12 | export QGIS_PATH=${QGIS_PREFIX_PATH} 13 | export LD_LIBRARY_PATH=${QGIS_PREFIX_PATH}/lib 14 | export PYTHONPATH=${QGIS_PREFIX_PATH}/share/qgis/python:${QGIS_PREFIX_PATH}/share/qgis/python/plugins:${PYTHONPATH} 15 | 16 | echo "QGIS PATH: $QGIS_PREFIX_PATH" 17 | export QGIS_DEBUG=0 18 | export QGIS_LOG_FILE=/tmp/inasafe/realtime/logs/qgis.log 19 | 20 | export PATH=${QGIS_PREFIX_PATH}/bin:$PATH 21 | 22 | echo "This script is intended to be sourced to set up your shell to" 23 | echo "use a QGIS 2.0 built in $QGIS_PREFIX_PATH" 24 | echo 25 | echo "To use it do:" 26 | echo "source $BASH_SOURCE /your/optional/install/path" 27 | echo 28 | 
echo "Then use the make file supplied here e.g. make guitest" 29 | -------------------------------------------------------------------------------- /scripts/update-strings.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | LOCALES=$* 3 | 4 | # Get newest .py files so we don't update strings unnecessarily 5 | 6 | CHANGED_FILES=0 7 | PYTHON_FILES=`find . -regex ".*\(ui\|py\)$" -type f` 8 | for PYTHON_FILE in $PYTHON_FILES 9 | do 10 | CHANGED=$(stat -c %Y $PYTHON_FILE) 11 | if [ ${CHANGED} -gt ${CHANGED_FILES} ] 12 | then 13 | CHANGED_FILES=${CHANGED} 14 | fi 15 | done 16 | 17 | # Qt translation stuff 18 | # for .ts file 19 | UPDATE=false 20 | for LOCALE in ${LOCALES} 21 | do 22 | TRANSLATION_FILE="i18n/$LOCALE.ts" 23 | if [ ! -f ${TRANSLATION_FILE} ] 24 | then 25 | # Force translation string collection as we have a new language file 26 | touch ${TRANSLATION_FILE} 27 | UPDATE=true 28 | break 29 | fi 30 | 31 | MODIFICATION_TIME=$(stat -c %Y ${TRANSLATION_FILE}) 32 | if [ ${CHANGED_FILES} -gt ${MODIFICATION_TIME} ] 33 | then 34 | # Force translation string collection as a .py file has been updated 35 | UPDATE=true 36 | break 37 | fi 38 | done 39 | 40 | if [ ${UPDATE} == true ] 41 | # retrieve all python files 42 | then 43 | echo ${PYTHON_FILES} 44 | # update .ts 45 | echo "Please provide translations by editing the translation files below:" 46 | for LOCALE in ${LOCALES} 47 | do 48 | echo "i18n/"${LOCALE}".ts" 49 | # Note we don't use pylupdate with qt .pro file approach as it is flakey 50 | # about what is made available. 51 | pylupdate4 -noobsolete ${PYTHON_FILES} -ts i18n/${LOCALE}.ts 52 | done 53 | else 54 | echo "No need to edit any translation files (.ts) because no python files" 55 | echo "has been updated since the last update translation. 
" 56 | fi 57 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | # import qgis libs so that ve set the correct sip api version 2 | import qgis # pylint: disable=W0611 # NOQA -------------------------------------------------------------------------------- /test/qgis_interface.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """QGIS plugin implementation. 3 | 4 | .. note:: This program is free software; you can redistribute it and/or modify 5 | it under the terms of the GNU General Public License as published by 6 | the Free Software Foundation; either version 2 of the License, or 7 | (at your option) any later version. 8 | 9 | .. note:: This source code was copied from the 'postgis viewer' application 10 | with original authors: 11 | Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk 12 | Copyright (c) 2011 German Carrillo, geotux_tuxman@linuxmail.org 13 | Copyright (c) 2014 Tim Sutton, tim@linfiniti.com 14 | 15 | """ 16 | 17 | __author__ = 'tim@linfiniti.com' 18 | __revision__ = '$Format:%H$' 19 | __date__ = '10/01/2011' 20 | __copyright__ = ( 21 | 'Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk and ' 22 | 'Copyright (c) 2011 German Carrillo, geotux_tuxman@linuxmail.org' 23 | 'Copyright (c) 2014 Tim Sutton, tim@linfiniti.com' 24 | ) 25 | 26 | import logging 27 | from qgis.PyQt.QtCore import QObject, pyqtSlot, pyqtSignal 28 | from qgis.core import QgsMapLayerRegistry 29 | from qgis.gui import QgsMapCanvasLayer 30 | LOGGER = logging.getLogger('QGIS') 31 | 32 | 33 | #noinspection PyMethodMayBeStatic,PyPep8Naming 34 | class QgisInterface(QObject): 35 | """Class to expose QGIS objects and functions to plugins. 36 | 37 | This class is here for enabling us to run unit tests only, 38 | so most methods are simply stubs. 
39 | """ 40 | currentLayerChanged = pyqtSignal(QgsMapCanvasLayer) 41 | 42 | def __init__(self, canvas): 43 | """Constructor 44 | :param canvas: 45 | """ 46 | QObject.__init__(self) 47 | self.canvas = canvas 48 | # Set up slots so we can mimic the behaviour of QGIS when layers 49 | # are added. 50 | LOGGER.debug('Initialising canvas...') 51 | # noinspection PyArgumentList 52 | QgsMapLayerRegistry.instance().layersAdded.connect(self.addLayers) 53 | # noinspection PyArgumentList 54 | QgsMapLayerRegistry.instance().layerWasAdded.connect(self.addLayer) 55 | # noinspection PyArgumentList 56 | QgsMapLayerRegistry.instance().removeAll.connect(self.removeAllLayers) 57 | 58 | # For processing module 59 | self.destCrs = None 60 | 61 | @pyqtSlot('QStringList') 62 | def addLayers(self, layers): 63 | """Handle layers being added to the registry so they show up in canvas. 64 | 65 | :param layers: list list of map layers that were added 66 | 67 | .. note:: The QgsInterface api does not include this method, 68 | it is added here as a helper to facilitate testing. 69 | """ 70 | #LOGGER.debug('addLayers called on qgis_interface') 71 | #LOGGER.debug('Number of layers being added: %s' % len(layers)) 72 | #LOGGER.debug('Layer Count Before: %s' % len(self.canvas.layers())) 73 | current_layers = self.canvas.layers() 74 | final_layers = [] 75 | for layer in current_layers: 76 | final_layers.append(QgsMapCanvasLayer(layer)) 77 | for layer in layers: 78 | final_layers.append(QgsMapCanvasLayer(layer)) 79 | 80 | self.canvas.setLayerSet(final_layers) 81 | #LOGGER.debug('Layer Count After: %s' % len(self.canvas.layers())) 82 | 83 | @pyqtSlot('QgsMapLayer') 84 | def addLayer(self, layer): 85 | """Handle a layer being added to the registry so it shows up in canvas. 86 | 87 | :param layer: list list of map layers that were added 88 | 89 | .. note: The QgsInterface api does not include this method, it is added 90 | here as a helper to facilitate testing. 91 | 92 | .. 
note: The addLayer method was deprecated in QGIS 1.8 so you should 93 | not need this method much. 94 | """ 95 | pass 96 | 97 | @pyqtSlot() 98 | def removeAllLayers(self): 99 | """Remove layers from the canvas before they get deleted.""" 100 | self.canvas.setLayerSet([]) 101 | 102 | def newProject(self): 103 | """Create new project.""" 104 | # noinspection PyArgumentList 105 | QgsMapLayerRegistry.instance().removeAllMapLayers() 106 | 107 | # ---------------- API Mock for QgsInterface follows ------------------- 108 | 109 | def zoomFull(self): 110 | """Zoom to the map full extent.""" 111 | pass 112 | 113 | def zoomToPrevious(self): 114 | """Zoom to previous view extent.""" 115 | pass 116 | 117 | def zoomToNext(self): 118 | """Zoom to next view extent.""" 119 | pass 120 | 121 | def zoomToActiveLayer(self): 122 | """Zoom to extent of active layer.""" 123 | pass 124 | 125 | def addVectorLayer(self, path, base_name, provider_key): 126 | """Add a vector layer. 127 | 128 | :param path: Path to layer. 129 | :type path: str 130 | 131 | :param base_name: Base name for layer. 132 | :type base_name: str 133 | 134 | :param provider_key: Provider key e.g. 'ogr' 135 | :type provider_key: str 136 | """ 137 | pass 138 | 139 | def addRasterLayer(self, path, base_name): 140 | """Add a raster layer given a raster layer file name 141 | 142 | :param path: Path to layer. 143 | :type path: str 144 | 145 | :param base_name: Base name for layer. 146 | :type base_name: str 147 | """ 148 | pass 149 | 150 | def activeLayer(self): 151 | """Get pointer to the active layer (layer selected in the legend).""" 152 | # noinspection PyArgumentList 153 | layers = QgsMapLayerRegistry.instance().mapLayers() 154 | for item in layers: 155 | return layers[item] 156 | 157 | def addToolBarIcon(self, action): 158 | """Add an icon to the plugins toolbar. 159 | 160 | :param action: Action to add to the toolbar. 
161 | :type action: QAction 162 | """ 163 | pass 164 | 165 | def removeToolBarIcon(self, action): 166 | """Remove an action (icon) from the plugin toolbar. 167 | 168 | :param action: Action to add to the toolbar. 169 | :type action: QAction 170 | """ 171 | pass 172 | 173 | def addToolBar(self, name): 174 | """Add toolbar with specified name. 175 | 176 | :param name: Name for the toolbar. 177 | :type name: str 178 | """ 179 | pass 180 | 181 | def mapCanvas(self): 182 | """Return a pointer to the map canvas.""" 183 | return self.canvas 184 | 185 | def mainWindow(self): 186 | """Return a pointer to the main window. 187 | 188 | In case of QGIS it returns an instance of QgisApp. 189 | """ 190 | pass 191 | 192 | def addDockWidget(self, area, dock_widget): 193 | """Add a dock widget to the main window. 194 | 195 | :param area: Where in the ui the dock should be placed. 196 | :type area: 197 | 198 | :param dock_widget: A dock widget to add to the UI. 199 | :type dock_widget: QDockWidget 200 | """ 201 | pass 202 | 203 | def legendInterface(self): 204 | """Get the legend.""" 205 | return self.canvas 206 | -------------------------------------------------------------------------------- /test/tenbytenraster.asc: -------------------------------------------------------------------------------- 1 | NCOLS 10 2 | NROWS 10 3 | XLLCENTER 1535380.000000 4 | YLLCENTER 5083260.000000 5 | DX 10 6 | DY 10 7 | NODATA_VALUE -9999 8 | 0 1 2 3 4 5 6 7 8 9 9 | 0 1 2 3 4 5 6 7 8 9 10 | 0 1 2 3 4 5 6 7 8 9 11 | 0 1 2 3 4 5 6 7 8 9 12 | 0 1 2 3 4 5 6 7 8 9 13 | 0 1 2 3 4 5 6 7 8 9 14 | 0 1 2 3 4 5 6 7 8 9 15 | 0 1 2 3 4 5 6 7 8 9 16 | 0 1 2 3 4 5 6 7 8 9 17 | 0 1 2 3 4 5 6 7 8 9 18 | CRS 19 | NOTES 20 | -------------------------------------------------------------------------------- /test/tenbytenraster.asc.aux.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Point 4 | 5 | 6 | 7 | 9 8 | 4.5 9 | 0 10 | 2.872281323269 11 | 12 | 13 | 14 | 
-------------------------------------------------------------------------------- /test/tenbytenraster.keywords: -------------------------------------------------------------------------------- 1 | title: Tenbytenraster 2 | -------------------------------------------------------------------------------- /test/tenbytenraster.lic: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Tim Sutton, Linfiniti Consulting CC 5 | 6 | 7 | 8 | tenbytenraster.asc 9 | 2700044251 10 | Yes 11 | Tim Sutton 12 | Tim Sutton (QGIS Source Tree) 13 | Tim Sutton 14 | This data is publicly available from QGIS Source Tree. The original 15 | file was created and contributed to QGIS by Tim Sutton. 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /test/tenbytenraster.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /test/tenbytenraster.qml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 0 26 | 27 | -------------------------------------------------------------------------------- /test/test_init.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Tests QGIS plugin init.""" 3 | 4 | __author__ = 'Tim Sutton ' 5 | __revision__ = '$Format:%H$' 6 | __date__ = '17/10/2010' 7 | __license__ = "GPL" 8 | __copyright__ = 'Copyright 2012, Australia Indonesia Facility for ' 9 | __copyright__ += 'Disaster Reduction' 10 | 11 | import os 12 | import unittest 13 | import logging 14 | import configparser 15 | 16 | LOGGER = logging.getLogger('QGIS') 17 | 
18 | 19 | class TestInit(unittest.TestCase): 20 | """Test that the plugin init is usable for QGIS. 21 | 22 | Based heavily on the validator class by Alessandro 23 | Passoti available here: 24 | 25 | http://github.com/qgis/qgis-django/blob/master/qgis-app/ 26 | plugins/validator.py 27 | 28 | """ 29 | 30 | def test_read_init(self): 31 | """Test that the plugin __init__ will validate on plugins.qgis.org.""" 32 | 33 | # You should update this list according to the latest in 34 | # https://github.com/qgis/qgis-django/blob/master/qgis-app/ 35 | # plugins/validator.py 36 | 37 | required_metadata = [ 38 | 'name', 39 | 'description', 40 | 'version', 41 | 'qgisMinimumVersion', 42 | 'email', 43 | 'author'] 44 | 45 | file_path = os.path.abspath(os.path.join( 46 | os.path.dirname(__file__), os.pardir, 47 | 'metadata.txt')) 48 | LOGGER.info(file_path) 49 | metadata = [] 50 | parser = configparser.ConfigParser() 51 | parser.optionxform = str 52 | parser.read(file_path) 53 | message = 'Cannot find a section named "general" in %s' % file_path 54 | assert parser.has_section('general'), message 55 | metadata.extend(parser.items('general')) 56 | 57 | for expectation in required_metadata: 58 | message = ('Cannot find metadata "%s" in metadata source (%s).' % ( 59 | expectation, file_path)) 60 | 61 | self.assertIn(expectation, dict(metadata), message) 62 | 63 | if __name__ == '__main__': 64 | unittest.main() 65 | -------------------------------------------------------------------------------- /test/test_qgis_environment.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Tests for QGIS functionality. 3 | 4 | 5 | .. note:: This program is free software; you can redistribute it and/or modify 6 | it under the terms of the GNU General Public License as published by 7 | the Free Software Foundation; either version 2 of the License, or 8 | (at your option) any later version. 
9 | 10 | """ 11 | __author__ = 'tim@linfiniti.com' 12 | __date__ = '20/01/2011' 13 | __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 14 | 'Disaster Reduction') 15 | 16 | import os 17 | import unittest 18 | from qgis.core import ( 19 | QgsProviderRegistry, 20 | QgsCoordinateReferenceSystem, 21 | QgsRasterLayer) 22 | 23 | from .utilities import get_qgis_app 24 | QGIS_APP = get_qgis_app() 25 | 26 | 27 | class QGISTest(unittest.TestCase): 28 | """Test the QGIS Environment""" 29 | 30 | def test_qgis_environment(self): 31 | """QGIS environment has the expected providers""" 32 | 33 | r = QgsProviderRegistry.instance() 34 | self.assertIn('gdal', r.providerList()) 35 | self.assertIn('ogr', r.providerList()) 36 | self.assertIn('postgres', r.providerList()) 37 | 38 | def test_projection(self): 39 | """Test that QGIS properly parses a wkt string. 40 | """ 41 | crs = QgsCoordinateReferenceSystem() 42 | wkt = ( 43 | 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",' 44 | 'SPHEROID["WGS_1984",6378137.0,298.257223563]],' 45 | 'PRIMEM["Greenwich",0.0],UNIT["Degree",' 46 | '0.0174532925199433]]') 47 | crs.createFromWkt(wkt) 48 | auth_id = crs.authid() 49 | expected_auth_id = 'EPSG:4326' 50 | self.assertEqual(auth_id, expected_auth_id) 51 | 52 | # now test for a loaded layer 53 | path = os.path.join(os.path.dirname(__file__), 'tenbytenraster.asc') 54 | title = 'TestRaster' 55 | layer = QgsRasterLayer(path, title) 56 | auth_id = layer.crs().authid() 57 | self.assertEqual(auth_id, expected_auth_id) 58 | 59 | if __name__ == '__main__': 60 | unittest.main() 61 | -------------------------------------------------------------------------------- /test/test_translations.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Safe Translations Test. 3 | 4 | .. 
note:: This program is free software; you can redistribute it and/or modify 5 | it under the terms of the GNU General Public License as published by 6 | the Free Software Foundation; either version 2 of the License, or 7 | (at your option) any later version. 8 | 9 | """ 10 | from .utilities import get_qgis_app 11 | 12 | __author__ = 'ismailsunni@yahoo.co.id' 13 | __date__ = '12/10/2011' 14 | __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 15 | 'Disaster Reduction') 16 | import unittest 17 | import os 18 | 19 | from qgis.PyQt.QtCore import QCoreApplication, QTranslator 20 | 21 | QGIS_APP = get_qgis_app() 22 | 23 | 24 | class SafeTranslationsTest(unittest.TestCase): 25 | """Test translations work.""" 26 | 27 | def setUp(self): 28 | """Runs before each test.""" 29 | if 'LANG' in iter(os.environ.keys()): 30 | os.environ.__delitem__('LANG') 31 | 32 | def tearDown(self): 33 | """Runs after each test.""" 34 | if 'LANG' in iter(os.environ.keys()): 35 | os.environ.__delitem__('LANG') 36 | 37 | def test_qgis_translations(self): 38 | """Test that translations work.""" 39 | parent_path = os.path.join(__file__, os.path.pardir, os.path.pardir) 40 | dir_path = os.path.abspath(parent_path) 41 | file_path = os.path.join( 42 | dir_path, 'i18n', 'af.qm') 43 | translator = QTranslator() 44 | translator.load(file_path) 45 | QCoreApplication.installTranslator(translator) 46 | 47 | expected_message = 'Goeie more' 48 | real_message = QCoreApplication.translate("@default", 'Good morning') 49 | self.assertEqual(real_message, expected_message) 50 | 51 | 52 | if __name__ == "__main__": 53 | suite = unittest.makeSuite(SafeTranslationsTest) 54 | runner = unittest.TextTestRunner(verbosity=2) 55 | runner.run(suite) 56 | -------------------------------------------------------------------------------- /test/test_write.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # @Time : 
2021/12/21 22:35 4 | # @Author : gjg 5 | # @Site : 6 | # @File : test_write.py 7 | # @Software: PyCharm 8 | 9 | # %% - QGIS 10 | import os 11 | # from qgis._core import QgsProject, QgsApplication 12 | from qgis.core import (QgsVectorLayer, QgsProject) 13 | from qgis.core import QgsClassificationEqualInterval 14 | 15 | v_layer = QgsVectorLayer("..data/PM25.shp", "PM25", "ogr") 16 | for field in v_layer.fields(): 17 | print(field.name(), field.typeName()) 18 | 19 | a = QgsClassificationEqualInterval 20 | a.classes(v_layer, expression='GDP', nclasses=5) 21 | 22 | # # vlayer = QgsVectorLayer(path_to_airports_layer, "Airports layer", "ogr") 23 | # if not v_layer.isValid(): 24 | # print("Layer failed to load!") 25 | # else: 26 | # QgsProject.instance().addMapLayer(v_layer) 27 | 28 | 29 | # %% 30 | 31 | # get the path to a geopackage e.g. /usr/share/qgis/resources/data/world_map.gpkg 32 | path_to_gpkg = '../data/world_map.gpkg' 33 | gpkg_countries_layer = path_to_gpkg + "|layername=countries" 34 | vlayer = QgsVectorLayer(gpkg_countries_layer, "Countries layer", "ogr") 35 | if not vlayer.isValid(): 36 | print("Layer failed to load!") 37 | else: 38 | QgsProject.instance().addMapLayer(vlayer) 39 | 40 | # %% pandas 检测数据类型 41 | import pandas as pd 42 | from pandas.api.types import is_numeric_dtype 43 | 44 | df = pd.DataFrame() 45 | 46 | # %% 47 | import numpy as np 48 | 49 | a = np.array([1, 2, 11, 11, 2]) 50 | b = (a == 1) 51 | a[b][0] 52 | # b[True] 53 | 54 | # %% geo interval 55 | 56 | data = np.array([[1, 3, 4, 5, 2], 57 | [2, 3, 1, 6, 3], 58 | [1, 5, 2, 3, 1], 59 | [3, 4, 9, 2, 1]]) 60 | 61 | data = whiten(data) 62 | 63 | # code book generation 64 | centroids, mean_value = kmeans(data, 3) 65 | 66 | print("Code book :\n", centroids, "\n") 67 | print("Mean of Euclidean distances :", 68 | mean_value.round(4)) 69 | 70 | clusters, distances = vq(data, centroids) 71 | 72 | print("Cluster index :", clusters, "\n") 73 | print("Distance from the centroids :", distances) 74 | 75 | 
# assign centroids and clusters 76 | centroids, clusters = kmeans2(data, 3, 77 | minit='random') 78 | 79 | print("Centroids :\n", centroids, "\n") 80 | print("Clusters :", clusters) 81 | 82 | # %% - kmeans2 83 | 84 | from scipy.cluster.vq import kmeans2 85 | import matplotlib.pyplot as plt 86 | 87 | rng = np.random.default_rng() 88 | a = rng.multivariate_normal([0, 6], [[2, 1], [1, 1.5]], size=45) 89 | b = rng.multivariate_normal([2, 0], [[1, -1], [-1, 3]], size=30) 90 | c = rng.multivariate_normal([6, 4], [[5, 0], [0, 1.2]], size=25) 91 | z = np.concatenate((a, b, c)) 92 | rng.shuffle(z) 93 | 94 | centroid, label = kmeans2(z, 3, minit='points') 95 | 96 | 97 | # %% geometric interval 98 | 99 | def geometric(values, classes): 100 | _min = min(values) 101 | _max = max(values) + 0.00001 # temporary bug correction: without +0.00001 the max value is not rendered in map 102 | X = (_max / _min) ** (1 / float(classes)) 103 | res = [_min * X ** k for k in range(classes + 1)] 104 | return res 105 | 106 | 107 | res = geometric(np.array([0.179589017, 0.026539462]), 7) 108 | # %% 109 | test = np.array([[0.026539462, 0.046593756, 0.020054, 0], 110 | [0.046593757, 0.059616646, 0.013023, 1.539927], 111 | [0.059616647, 0.068073471, 0.008457, 1.539927], 112 | [0.068073472, 0.081096361, 0.013023, 0.649382], 113 | [0.081096362, 0.101150655, 0.020054, 0.649382], 114 | [0.101150656, 0.132032793, 0.030882, 0.649382], 115 | [0.132032794, 0.179589017, 0.047556, 0.649382]]) 116 | 117 | # %% 118 | import plotly.express as px 119 | 120 | df = px.data.tips() 121 | fig = px.histogram(df, x="total_bill", subplot) 122 | fig.show() 123 | 124 | # %% 125 | import plotly.graph_objects as go 126 | from plotly.subplots import make_subplots 127 | 128 | x = ['1970-01-01', '1970-01-01', '1970-02-01', '1970-04-01', '1970-01-02', 129 | '1972-01-31', '1970-02-13', '1971-04-19'] 130 | 131 | fig = make_subplots(rows=3, cols=2, subplot_titles=['1','2','3','4','5']) 132 | 133 | trace0 = go.Histogram(x=x, 
nbinsx=4, name='test') 134 | trace1 = go.Histogram(x=x, nbinsx=8) 135 | trace2 = go.Histogram(x=x, nbinsx=10) 136 | trace3 = go.Histogram(x=x, 137 | xbins=dict( 138 | start='1969-11-15', 139 | end='1972-03-31', 140 | size='M18'), # M18 stands for 18 months 141 | autobinx=False 142 | ) 143 | trace4 = go.Histogram(x=x, 144 | xbins=dict( 145 | start='1969-11-15', 146 | end='1972-03-31', 147 | size='M4'), # 4 months bin size 148 | autobinx=False 149 | ) 150 | trace5 = go.Histogram(x=x, 151 | xbins=dict( 152 | start='1969-11-15', 153 | end='1972-03-31', 154 | size='M2'), # 2 months 155 | autobinx=False 156 | ) 157 | 158 | fig.append_trace(trace0, 1, 1) 159 | fig.append_trace(trace1, 1, 2) 160 | fig.append_trace(trace2, 2, 1) 161 | fig.append_trace(trace3, 2, 2) 162 | fig.append_trace(trace4, 3, 1) 163 | fig.append_trace(trace5, 3, 2) 164 | 165 | fig.show() 166 | 167 | #%% 168 | import numpy as np 169 | import pandas as pd 170 | from itertools import combinations 171 | a = np.arange(4) 172 | # 173 | com_a2 = combinations(a,2) 174 | 175 | com_a2_list = [i for i in com_a2] 176 | a_com_value = [a[list(i)] for i in com_a2_list] 177 | fuc = np.array([[i.min(),i.max(),i.sum()] for i in a_com_value]) 178 | func_df = pd.DataFrame(data=fuc,index=com_a2_list,columns=['inter_min','inter_max','inter_sum']) 179 | func_df 180 | # func_df[['inter_min','inter_sum','inter_max']] - func_df['inter_sum'] -------------------------------------------------------------------------------- /test/utilities.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Common functionality used by regression tests.""" 3 | 4 | import sys 5 | import logging 6 | 7 | 8 | LOGGER = logging.getLogger('QGIS') 9 | QGIS_APP = None # Static variable used to hold hand to running QGIS app 10 | CANVAS = None 11 | PARENT = None 12 | IFACE = None 13 | 14 | 15 | def get_qgis_app(): 16 | """ Start one QGIS application to test against. 
17 | 18 | :returns: Handle to QGIS app, canvas, iface and parent. If there are any 19 | errors the tuple members will be returned as None. 20 | :rtype: (QgsApplication, CANVAS, IFACE, PARENT) 21 | 22 | If QGIS is already running the handle to that app will be returned. 23 | """ 24 | 25 | try: 26 | from qgis.PyQt import QtGui, QtCore 27 | from qgis.core import QgsApplication 28 | from qgis.gui import QgsMapCanvas 29 | from .qgis_interface import QgisInterface 30 | except ImportError: 31 | return None, None, None, None 32 | 33 | global QGIS_APP # pylint: disable=W0603 34 | 35 | if QGIS_APP is None: 36 | gui_flag = True # All test will run qgis in gui mode 37 | #noinspection PyPep8Naming 38 | QGIS_APP = QgsApplication(sys.argv, gui_flag) 39 | # Make sure QGIS_PREFIX_PATH is set in your env if needed! 40 | QGIS_APP.initQgis() 41 | s = QGIS_APP.showSettings() 42 | LOGGER.debug(s) 43 | 44 | global PARENT # pylint: disable=W0603 45 | if PARENT is None: 46 | #noinspection PyPep8Naming 47 | PARENT = QtGui.QWidget() 48 | 49 | global CANVAS # pylint: disable=W0603 50 | if CANVAS is None: 51 | #noinspection PyPep8Naming 52 | CANVAS = QgsMapCanvas(PARENT) 53 | CANVAS.resize(QtCore.QSize(400, 400)) 54 | 55 | global IFACE # pylint: disable=W0603 56 | if IFACE is None: 57 | # QgisInterface is a stub implementation of the QGIS plugin interface 58 | #noinspection PyPep8Naming 59 | IFACE = QgisInterface(CANVAS) 60 | 61 | return QGIS_APP, CANVAS, IFACE, PARENT 62 | -------------------------------------------------------------------------------- /ui/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2022/1/11 17:48 4 | # @Author : gjg 5 | # @Site : 6 | # @File : __init__.py.py 7 | # @Software: PyCharm 8 | --------------------------------------------------------------------------------