├── .gitignore
├── rscripts
├── Frequency_table.rsx
├── qqplot.rsx
├── Raster_histogram.rsx
├── Kolmogrov-Smirnov_test.rsx
├── Polygone.rsx
├── Histogram.rsx
├── Histogram.rsx.help
├── barplots.rsx
├── Density_curve.rsx.help
├── Random_sampling_grid.rsx
├── qqplot.rsx.help
├── Regular_sampling_grid.rsx
├── Minimum_convex_polygon.rsx
├── F_function.rsx
├── G_function.rsx
├── Raster_histogram.rsx.help
├── Polygone.rsx.help
├── scatterplot_regressione.rsx
├── frequency_plot.rsx.help
├── Quadrat_analysis.rsx
├── scatterplot_types.rsx.help
├── CART.rsx.help
├── scatterplot_log.rsx.help
├── Extract_points_from_line.rsx.help
├── Summarize_by_field.rsx.help
├── CART.rsx
├── Distance.rsx.help
├── Alpha_shape.rsx.help
├── prova_ggplot2.rsx.help
├── Simple_Linear_Regression.rsx
├── Summarize_by_two_fields.rsx.help
├── ggplot_scatterplot.rsx
├── Frequency_table.rsx.help
├── Kolmogrov-Smirnov_test.rsx.help
├── Density_curve.rsx
├── ggplot_scatterplot.rsx.help
├── Monte-Carlo_spatial_randomness.rsx
├── ANOVA.rsx
├── ANOVA.rsx.help
├── Autocor_spatiale.rsx.help
├── ACP_var.rsx.help
├── Regular_sampling_grid.rsx.help
├── Multiple_Regression.rsx
├── ternaryPlots.rsx.help
├── Close_neighbor.rsx.help
├── Extract_points_from_line.rsx
├── Kriging.rsx.help
├── scatterplot_types.rsx
├── Ripley_-_Rasson_spatial_domain.rsx
├── ACP_cercle.rsx.help
├── Tobler.rsx.help
├── Douglas-Peucker.rsx.help
├── Ripley_-_Rasson_spatial_domain.rsx.help
├── AFDM.rsx.help
├── CAH.rsx.help
├── Random_sampling_grid.rsx.help
├── frequency_plot.rsx
├── ACP_contribution.rsx.help
├── Kriging_with_model_selection.rsx.help
├── Summarize_by_field.rsx
├── Simple_Linear_Regression.rsx.help
├── Summary_statistics.rsx.help
├── ACP_cercle.rsx
├── Advanced_raster_histogram.rsx
├── scatterplot_log.rsx
├── Douglas-Peucker_with_choice.rsx.help
├── Alpha_shape.rsx
├── Kernel_density_estimation.rsx.help
├── Distance.rsx
├── ACP_individus.rsx.help
├── Multiple_Regression.rsx.help
├── Summarize_by_two_fields.rsx
├── barplots.rsx.help
├── AFC.rsx.help
├── Advanced_raster_histogram.rsx.help
├── Summary_statistics.rsx
├── Quadrat_analysis.rsx.help
├── ACP_contribution.rsx
├── ternaryPlots.rsx
├── AFDM.rsx
├── Selection_with_criterion_choice.rsx.help
├── A-star.rsx.help
├── ACP_var.rsx
├── Autocor_spatiale.rsx
├── Selection_with_r2.rsx
├── Selection_Cp.rsx
├── Selection_with_r2_adjusted.rsx
├── Selection_with_Bayesian_Information_Criterion.rsx
├── CAH.rsx
├── Relative_distribution_(distance_covariate).rsx
├── raster-attribute-table.rsx.help
├── Selection_with_r2.rsx.help
├── Close_neighbor.rsx
├── Selection_with_Cp.rsx.help
├── Selection_with_r2_adjusted.rsx.help
├── Inverse_Distance_Weigthing.rsx.help
├── raster-attribute-table.rsx
├── Relative_distribution_(raster_covariate).rsx
├── Selection_with_Bayesian_Information_Criterion.rsx.help
├── Minimum_convex_polygon.rsx.help
├── Tobler.rsx
├── Variogram Modelling.rsx.help
├── Inverse_Distance_Weigthing_with_method_selection.rsx.help
├── Monte-Carlo_spatial_randomness.rsx.help
├── F_function.rsx.help
├── ACP_individus.rsx
├── Kriging.rsx
├── Variogram Modelling.rsx
├── AFC.rsx
├── G_function.rsx.help
├── Relative_distribution_(distance_covariate).rsx.help
├── Kriging_with_model_selection.rsx
├── Relative_distribution_(raster_covariate).rsx.help
├── Ordinary Kriging.rsx.help
├── Selection_with_criterion_choice.rsx
├── Douglas-Peucker_with_choice.rsx
├── list.txt
├── Ordinary Kriging.rsx
├── Douglas-Peucker.rsx
├── Kernel_density_estimation.rsx
└── Inverse_Distance_Weigthing.rsx
├── scripts
├── Points on touching lines.py.help
├── Points on crossing lines.py.help
├── Keep_n_biggest_parts.py.help
├── Assign_prj.py
├── Assign_prj.py.help
├── Cut_by_field.py.help
├── Square_grid_from_layer_extent.py.help
├── Buffer Contour.py.help
├── Remove_parts.py.help
├── Merge_all_lines_in_layer.py.help
├── realcentroid_algorithm.py.help
├── Read_file_content_into_string.py
├── Save_features_filtered_by_expression.py.help
├── Fill_holes.py.help
├── Read_file_content_into_string.py.help
├── make_landsat8_footprints.py.help
├── Hex_grid_from_layer_bounds.py
├── Save_selected_features.py
├── ellipsoidal_area.py.help
├── spatial_cross_join_attributes.py.help
├── Create_tiling_from_vector_layer.py
├── Define_1_raster_layer_properties.py.help
├── Points_from_vector.py
├── Create_vector_layer_from_SQL_Query.py.help
├── Set_multiple_raster_layers_properties.py.help
├── EquivalentNumField.py.help
├── Generate_Unique_values_renderer.py.help
├── Contour.py.help
├── Square_grid_from_layer_extent.py
├── Merge_all_lines_in_layer.py
├── Create_vector_layer_from_Postgis_table.py.help
├── Define_vector_layer_properties.py.help
├── Define_1_vector_layer_properties.py.help
├── Define_1_raster_layer_properties.py
├── FrequencyStats.py.help
├── Summarize.py
├── distance_lines_between_points.py
├── spatial_cross_join_attributes.py
├── Batch_replace_in_string_via_regex_dictionary.py
├── Save_features_filtered_by_expression.py
├── Split_vector_layer_by_attribute.py
├── Fill_holes.py
├── Unique_values_count.py.help
├── Keep_n_biggest_parts.py
├── Create_vector_layer_from_Postgis_table.py
├── Set_multiple_raster_layers_properties.py
├── DissolveWithStats.py.help
├── Remove_parts.py
├── Define_1_vector_layer_properties.py
├── pygraticule.py.help
├── Define_vector_layer_properties.py
├── Cut_by_field.py
├── EquivalentNumField.py
├── Buffer Contour.py
├── CSV_RGB_or_HEX_to_categorized_style.py.help
├── Unique_values_count.py
├── realcentroid_algorithm.py
├── predominant_category.py
├── make_landsat8_footprints.py
├── CSV_R-G-B_to_categorized_style.py.help
├── Generate_Unique_values_renderer.py
├── Contour.py
├── Points on crossing lines.py
├── Cadastre_FR_WMS.py
├── Cadastre_FR_WMS.py.help
├── list.txt
├── Points on touching lines.py
├── Create_vector_layer_from_SQL_Query.py
├── ellipsoidal_area.py
├── Quick_PostgreSQL_Model_Builder_from_description_stored_in_table_sheet.py.help
├── Create_rasters_from_canvas_for_each_vector_layer_feature_extent.py
├── Extract_raster_values_to_CSV.py
└── Extract_raster_values_to_shapefile.py
├── README.md
├── models
├── list.txt
├── model_PCA.model.help
├── model_IDW.model.help
├── model_PCA.model
├── model_IDW.model
└── mult.model
├── help_converter.py
└── create_lists.py
/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .RData
4 | .Ruserdata
5 | QGIS-Processing.Rproj
6 |
--------------------------------------------------------------------------------
/rscripts/Frequency_table.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field=Field Layer
4 | >table(Layer[[Field]])
5 |
--------------------------------------------------------------------------------
/rscripts/qqplot.rsx:
--------------------------------------------------------------------------------
1 | ##Plots=group
2 | ##showplots
3 | ##Layer=vector
4 | ##X=Field Layer
5 | qqnorm(Layer[[X]])
6 | qqline(Layer[[X]])
7 |
--------------------------------------------------------------------------------
/rscripts/Raster_histogram.rsx:
--------------------------------------------------------------------------------
1 | ##Raster processing=group
2 | ##Layer = raster
3 | ##showplots
4 | hist(as.matrix(Layer),main="Histogram",xlab="Layer")
5 |
--------------------------------------------------------------------------------
/rscripts/Kolmogrov-Smirnov_test.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field=Field Layer
4 | library(nortest)
5 | >lillie.test(Layer[[Field]])
6 |
--------------------------------------------------------------------------------
/rscripts/Polygone.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##nombre= number 10
4 | library(sp)
5 | Layer@polygons[[nombre]]@Polygons[[1]]@coords
6 |
--------------------------------------------------------------------------------
/scripts/Points on touching lines.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Chourmo - v1", "ALG_DESC": "Extract nodes in a line layer where lines touch each other in buffer distance"}
--------------------------------------------------------------------------------
/rscripts/Histogram.rsx:
--------------------------------------------------------------------------------
1 | ##Vector processing=group
2 | ##showplots
3 | ##Layer=vector
4 | ##Field=Field Layer
5 | hist(Layer[[Field]],main=paste("Histogram of",Field),xlab=paste(Field))
6 |
--------------------------------------------------------------------------------
/scripts/Points on crossing lines.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Chourmo - v1", "ALG_DESC": "Extract nodes in a line layer where lines cross each other in buffer distance", "ALG_HELP_CREATOR": ""}
--------------------------------------------------------------------------------
/rscripts/Histogram.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field": "A numeric field.", "ALG_DESC": "This tool creates a histogram of the input numeric field using the hist() function.", "Layer": "A vector layer with a numeric field."}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # This project is archived and stays only for reference
2 |
3 | QGIS resources repository
4 | ===========================
5 |
6 | A repository with scripts and models downloadable from the Processing toolbox
7 |
--------------------------------------------------------------------------------
/rscripts/barplots.rsx:
--------------------------------------------------------------------------------
1 | ##Vector processing=group
2 | ##Bar plots=name
3 | ##showplots
4 | ##Layer=vector
5 | ##Field=Field Layer
6 | barplot(table(Layer[[Field]]), main = paste("Bar plot of", Field), xlab = paste(Field))
7 |
--------------------------------------------------------------------------------
/rscripts/Density_curve.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script computes the density curve.", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector", "Field": "the field", "RPLOTS": "the density curve", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Random_sampling_grid.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector
3 | ##Size=number 10
4 | ##Output= output vector
5 | pts=spsample(Layer,Size,type="random")
6 | Output=SpatialPointsDataFrame(pts, as.data.frame(pts))
7 |
--------------------------------------------------------------------------------
/rscripts/qqplot.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "QQPLOT for normal distribution", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer in input", "ALG_VERSION": "1.0", "RPLOTS": "qqplot", "ALG_HELP_CREATOR": "Matteo Ghetta", "X": "X parameter"}
--------------------------------------------------------------------------------
/rscripts/Regular_sampling_grid.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector
3 | ##Size=number 10
4 | ##Output= output vector
5 | pts=spsample(Layer,Size,type="regular")
6 | Output=SpatialPointsDataFrame(pts, as.data.frame(pts))
7 |
--------------------------------------------------------------------------------
/rscripts/Minimum_convex_polygon.rsx:
--------------------------------------------------------------------------------
1 | ##Home Range Analysis=group
2 | ##Layer=vector
3 | ##Percentage=number 10
4 | ##Field=Field Layer
5 | ##Home_ranges=Output vector
6 | library(adehabitatHR)
7 | Home_ranges<-mcp(Layer[,Field],percent=Percentage)
8 |
--------------------------------------------------------------------------------
/rscripts/F_function.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector point
3 | ##Nsim=number 10
4 | ##showplots
5 | library("maptools")
6 | library("spatstat")
7 | ppp=as(as(Layer, "SpatialPoints"),"ppp")
8 | plot(envelope(ppp, Fest, nsim=Nsim))
9 |
--------------------------------------------------------------------------------
/rscripts/G_function.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector point
3 | ##Nsim=number 10
4 | ##showplots
5 | library("maptools")
6 | library("spatstat")
7 | ppp=as(as(Layer, "SpatialPoints"),"ppp")
8 | plot(envelope(ppp, Gest, nsim=Nsim))
9 |
--------------------------------------------------------------------------------
/rscripts/Raster_histogram.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Victor Olaya, volayaf(at)gmail.com", "ALG_DESC": "The script creates a raster histogram.", "Layer ": "An input raster.", "RPLOTS": "Histogram", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com"}
--------------------------------------------------------------------------------
/rscripts/Polygone.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "JEANDENANS L.", "nombre": "the number of the polygon which we want the coordinates", "ALG_DESC": "This script extracts the coordinates of polygons", "Layer": "Input vector with polygons", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/scatterplot_regressione.rsx:
--------------------------------------------------------------------------------
1 | ##My scripts=group
2 | ##showplots
3 | ##Layer=vector
4 | ##X=Field Layer
5 | ##Y=Field Layer
6 | ##Title=string
7 | plot(Layer[[X]], Layer[[Y]], xlab=X, ylab=Y, main=Title)+
8 | abline(lm(Layer[[Y]]~Layer[[X]]))
9 |
10 |
11 |
--------------------------------------------------------------------------------
/scripts/Keep_n_biggest_parts.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Chourmo - v1", "To_keep": "Number of parts to keep, must be larger than 0", "ALG_DESC": "Keep n biggest sub polygon in a multipolygon, without exploding it...", "ALG_HELP_CREATOR": "Chourmo - v1", "Polygons": "Input layer"}
--------------------------------------------------------------------------------
/rscripts/frequency_plot.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Frequency plot with mean and median lines", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer in input", "Field": "Numeric field of the vector layer", "RPLOTS": "Frequency plot", "ALG_HELP_CREATOR": "Matteo Ghetta", "ALG_VERSION": "0.1"}
--------------------------------------------------------------------------------
/models/list.txt:
--------------------------------------------------------------------------------
1 | model_IDW.model,1,model IDW
2 | model_PCA.model,1,model PCA
3 | modelwithin.model,1,modelwithin
4 | mult.model,1,mult
5 | point2polygon.model,1,point2polygon
6 | polygon_skeleton.model,1,polygon skeleton
7 | rasterizeandfill.model,1,rasterizeandfill
8 | watersheds.model,1,watersheds
--------------------------------------------------------------------------------
/rscripts/Quadrat_analysis.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector point
3 | ##showplots
4 | library("maptools")
5 | library("spatstat")
6 | ppp=as(as(Layer, "SpatialPoints"),"ppp")
7 | qc=quadratcount(ppp)
8 | plot(Layer)
9 | plot(qc, add=TRUE)
10 | >quadrat.test(ppp);
11 |
--------------------------------------------------------------------------------
/rscripts/scatterplot_types.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Scatterplot", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer in input", "ALG_VERSION": "1.0", "RPLOTS": "Plot in output", "ALG_HELP_CREATOR": "Matteo Ghetta", "Y": "Y parameter", "X": "X parameter", "type": "Plot type (point, line, both)"}
--------------------------------------------------------------------------------
/scripts/Assign_prj.py:
--------------------------------------------------------------------------------
1 | ##Vector=group
2 | ##layer=multiple vector
3 | ##file_prj=file
4 |
5 | from qgis.core import *
6 | import shutil
7 |
8 | my_list = layer.split(",")
9 | for i in my_list:
10 | a=i.replace(".shp",".prj")
11 | shutil.copy2(file_prj,a)
12 | progress.setInfo(a)
13 |
--------------------------------------------------------------------------------
/rscripts/CART.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a Classification And Regression Trees (CART) and gives the plot with the tree.", "ALG_CREATOR": "JEANDENANS L.", "Field1": "The field", "Layer": "Input vector with at least one field.", "RPLOTS": "the tree of the CART", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/scatterplot_log.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Scatterplot with different axis scales", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer in input", "ALG_VERSION": "1.0", "RPLOTS": "Output plot", "ALG_HELP_CREATOR": "Matteo Ghetta", "Y": "Y parameter", "X": "X parameter", "type": "Scale type (normal, logarithmic)"}
--------------------------------------------------------------------------------
/rscripts/Extract_points_from_line.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script extracts coordinates of all points of a line", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector with lines", "ALG_HELP_CREATOR": "JEANDENANS L.", "nombre": "number of the line which we want the coordinates", "Output": "a shapefile with these points"}
--------------------------------------------------------------------------------
/rscripts/Summarize_by_field.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script summarizes a numeric field by a factor field. It requires packages \"doBy\",\"foreign\" and \"plyr\".", "ALG_CREATOR": "Filipe S. Dias", "Layer": "Input vector layer", "Numeric_field": "Numeric field", "Factor": "Factor field", "Folder": "Output folder. This is NOT optional."}
2 |
--------------------------------------------------------------------------------
/rscripts/CART.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##showplots
5 | library(cluster)
6 | library(rpart)
7 | arbre <- rpart(Layer[[Field1]]~.,Layer)
8 | arbre
9 | plot(arbre, main="Arbre", branch=1, compress=T, margin=0.1)
10 | text(arbre,splits=T, fancy=T, use.n=F, pretty=0, all=T)
11 |
--------------------------------------------------------------------------------
/rscripts/Distance.rsx.help:
--------------------------------------------------------------------------------
1 | {"Layer2": "a line", "ALG_DESC": "This script calculates the distance between two geometries: a line and points.", "ALG_CREATOR": "JEANDENANS L.", "Layer1": "a set of data (points)", "Field": "identification of points", "ALG_HELP_CREATOR": "JEANDENANS L.", "output": "a shapefile with points and the distance."}
--------------------------------------------------------------------------------
/rscripts/Alpha_shape.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script computes a concave hull with the method of alpha-shape and gives a shapefile with lines of the concave hull.", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector with points", "ALG_HELP_CREATOR": "JEANDENANS L.", "alpha": "a number >0", "Output": "a shapefile with the concave hull"}
--------------------------------------------------------------------------------
/rscripts/prova_ggplot2.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Simple scatterplot with ggplot (be sure ggplot2 is installed on your computer!)", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer in input", "ALG_VERSION": "1.0", "RPLOTS": "Plot in output", "ALG_HELP_CREATOR": "Matteo Ghetta", "Y": "Y parameter", "X": "X parameter", "Z": "Grouping variable"}
--------------------------------------------------------------------------------
/rscripts/Simple_Linear_Regression.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##Field2=Field Layer
5 | ##showplots
6 | test<-lm(Layer[[Field1]] ~Layer[[Field2]] )
7 | par(mfrow=c(2,2))
8 | plot(test,which=1)
9 | plot(test,which=2)
10 | plot(test,which=3)
11 | plot(test,which=4)
12 | summary(test)
13 |
--------------------------------------------------------------------------------
/rscripts/Summarize_by_two_fields.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script summarizes a numeric field by two factor fields. It requires packages \"doBy\",\"foreign\" and \"plyr\".", "Factor1": "Factor field", "Factor2": "Factor field", "ALG_CREATOR": "Filipe S. Dias ", "Layer": "Layer", "Numeric_field": "Numeric field", "Folder": "Output folder"}
2 |
--------------------------------------------------------------------------------
/scripts/Assign_prj.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Assign projection to shapefile copying the .prj file", "ALG_CREATOR": "Luca Lanteri", "layer": "the input layer", "ALG_VERSION": "0.1", "ALG_HELP_CREATOR": "Luca Lanteri", "input": "[layer]: The list of the layer to assign projection\n[file prj]: The prj file", "file_prj": "The file containing prj information"}
2 |
--------------------------------------------------------------------------------
/rscripts/ggplot_scatterplot.rsx:
--------------------------------------------------------------------------------
1 | ##Vector processing=group
2 | ##showplots
3 | ##Layer=vector
4 | ##X=Field Layer
5 | ##Y=Field Layer
6 | ##Group=Field Layer
7 | require(ggplot2)
8 | ggplot()+
9 | geom_point(aes(x=Layer[[X]],y=Layer[[Y]],
10 | color=as.factor(Layer[[Group]])))+
11 | theme(legend.title = element_blank())+
12 | xlab(X)+
13 | ylab(Y)
14 |
--------------------------------------------------------------------------------
/rscripts/Frequency_table.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This tool builds a frequency table using the table() function.", "R_CONSOLE_OUTPUT": "Frequency table.", "ALG_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com ", "Layer": "A vector layer with a numeric or string field.", "Field": "A string or numeric field.", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com "}
--------------------------------------------------------------------------------
/rscripts/Kolmogrov-Smirnov_test.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field": "A numeric field.", "ALG_DESC": "This script performs the Lilliefors (Kolmogorov-Smirnov) test for the composite hypothesis of normality.\n\nR dependencies: library \"nortest\"\n\n", "R_CONSOLE_OUTPUT": "The results of the Lilliefors (Kolmogorov-Smirnov) test.", "Layer": "A vector containing at least one numeric field.", "null": ""}
--------------------------------------------------------------------------------
/rscripts/Density_curve.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field=Field Layer
4 | ##showplots
5 | Densite <- density(Layer[[Field]])
6 | plot(Densite$x, Densite$y, type="b")
7 | abline(v = mean(Layer[[Field]]), col = "red")
8 | abline(v = mean(Layer[[Field]])+2*sd(Layer[[Field]]), col = "green")
9 | abline(v = mean(Layer[[Field]])-2*sd(Layer[[Field]]), col = "green")
--------------------------------------------------------------------------------
/rscripts/ggplot_scatterplot.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Simple scatterplot (with grouping option) with ggplot.\n\nNote: be sure ggplot2 package is installed in your computer!", "Group": "Grouping variable", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Input vector layer", "ALG_VERSION": "1.0", "RPLOTS": "ggplot graph", "ALG_HELP_CREATOR": "Matteo Ghetta", "Y": "Y parameter", "X": "X parameter"}
--------------------------------------------------------------------------------
/rscripts/Monte-Carlo_spatial_randomness.rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector point
3 | ##Simulations=number 100
4 | ##Optional_plot_name=string
5 | ##showplots
6 | library(spatstat)
7 | library(maptools)
8 | sp <- as(Layer, "SpatialPoints")
9 | sp <- as(sp, "ppp")
10 | e <- envelope(sp, Kest, nsim = Simulations)
11 | >e
12 | plot(e, main = Optional_plot_name)
13 |
--------------------------------------------------------------------------------
/rscripts/ANOVA.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##Field2=Field Layer
5 | ##Field3=Field Layer
6 | ##Field4=Field Layer
7 | ##Field5=Field Layer
8 | test1<-lm(Layer[[Field1]]~Layer[[Field2]]+Layer[[Field3]]+Layer[[Field4]]+Layer[[Field5]] )
9 | test2<-lm(Layer[[Field1]]~Layer[[Field2]]+Layer[[Field4]]+Layer[[Field5]] )
10 | anova(test1,test2)
--------------------------------------------------------------------------------
/rscripts/ANOVA.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does an ANalysis Of VAriance (ANOVA) and gives the summary of the test.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 5 numerics fields.", "Field4": "The fourth field", "Field5": "The fifth field", "Field2": "The second field", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/scripts/Cut_by_field.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Cut one layer by the other for features that share a common value in specified fields. Keep the inside of the first layer", "Cut_Key_Field": "Field that contains the shared value with cut polygons", "ALG_CREATOR": "Chourmo - v1", "Key_Field": "Field that contains the shared value with cutting polygons", "ALG_HELP_CREATOR": "Chourmo - v1", "Polygons": ""}
--------------------------------------------------------------------------------
/rscripts/Autocor_spatiale.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script calculates the spatial correlation with the Moran's test ", "nb_vois": "number of neighbors in the Moran's test", "nb_simul": "number of simulations in the Moran's test", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector with at least one field", "Field": "the field", "RPLOTS": "histogram of residues", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/scripts/Square_grid_from_layer_extent.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script creates a square grid for the input layer extent. \nThe grid size can be chosen by the user.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "cellsize": "The size of each grid cell in map units.", "grid": "The output vector grid.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "input": "The input vector layer", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/scripts/Buffer Contour.py.help:
--------------------------------------------------------------------------------
1 | {"Value_field": "", "ALG_DESC": "Make circular buffer contours around points to each level value", "ALG_CREATOR": "Chourmo", "Points": "", "Levels": "Levels in Value_field unit, separated by a semicolon", "Group_by_field": "A different buffer will be created for each unique field value", "Buffer_parameter": "buffer size = (level - value) * buffer parameter", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/ACP_var.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a Principal Component Analysis (PCA) and gives the representation of variables.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 4 fields", "Field4": "The fourth field", "Field2": "The second field", "RPLOTS": "the plot of variables of the PCA", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Regular_sampling_grid.rsx.help:
--------------------------------------------------------------------------------
1 | {"Output": "Sampled points", "ALG_DESC": "This scripts samples point location within a given polygon(s) using a regular (systematically aligned) sampling method. The methods used assume that the geometry used is not spherical, so objects should be in planar coordinates.", "Layer": "A vector layer containing polygons.\n", "Size": "Number of sample points to be generated.\n"}
--------------------------------------------------------------------------------
/scripts/Remove_parts.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Delete holes and polygons less than specified size\nRequires Shapelib", "ALG_CREATOR": "Chourmo - v1", "Max_Area": "Threshold for deleting holes, in the same unit as the polygon layer.", "Delete_holes": "If no, only delete polygons and polygon parts in multipolygon, else also delete holes ('rings') in polygons", "ALG_HELP_CREATOR": "Chourmo - v1", "Polygons": "Polygon layer"}
--------------------------------------------------------------------------------
/rscripts/Multiple_Regression.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##Field2=Field Layer
5 | ##Field3=Field Layer
6 | ##Field4=Field Layer
7 | ##Field5=Field Layer
8 | ##Field6=Field Layer
9 | ##Field7=Field Layer
10 | library(ade4)
11 | test<-lm(Layer[[Field1]]~Layer[[Field2]]+Layer[[Field3]]+Layer[[Field4]]+Layer[[Field5]]+Layer[[Field6]]+Layer[[Field7]])
12 | summary(test)
13 |
--------------------------------------------------------------------------------
/rscripts/ternaryPlots.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "You can easily create ternary plots in QGIS with ggplot2 and ggtern package!\n\nrequires ggplot2 and ggtern packages", "Group": "Optional grouping Field", "ALG_CREATOR": "Matteo Ghetta", "Layer": "Vector layer you want to use to calculate the ternary plot", "ALG_VERSION": "0.1", "RPLOTS": "ternary plot", "ALG_HELP_CREATOR": "Matteo Ghetta", "Y": "Y Field", "X": "X Field", "Z": "Z Field"}
--------------------------------------------------------------------------------
/scripts/Merge_all_lines_in_layer.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Merge all lines/polylines in layer", "Input_lines": "Lines layer", "ALG_CREATOR": "Alexander Muriy\n\n(Institute Of Environmental Geoscience [IEG \nRAS], Moscow, Russia)\n[amuriy AT gmail DOT com]", "ALG_VERSION": "1.0", "ALG_HELP_CREATOR": "Alexander Muriy\n\n(Institute Of Environmental Geoscience [IEG \nRAS], Moscow, Russia)\n[amuriy AT gmail DOT com]", "Merged_lines": ""}
--------------------------------------------------------------------------------
/scripts/realcentroid_algorithm.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Zoltan Siki", "output": "point layer (pointOnSurface)", "ALG_DESC": "This script generates at internal point of the polygons in a vector layer aka pointOnSurface (GEOS).\nAttributes are copied from the polygons to the output points.\nSingle point will be generated for multipolygons.", "ALG_VERSION": "1.0", "ALG_HELP_CREATOR": "Zoltan Siki", "poly": "input polygon layer"}
2 |
--------------------------------------------------------------------------------
/rscripts/Close_neighbor.rsx.help:
--------------------------------------------------------------------------------
1 | {"distance": "a number >0", "ALG_DESC": "This script selects points which are separated by a distance equal or lower than a distance d and gives a shapefile with points selected.", "ALG_CREATOR": "JEANDENANS L.", "Field1": "the name of this points in order to identify objects", "Layer": "Input vector with points", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a shapefile with data which are selected"}
--------------------------------------------------------------------------------
/rscripts/Extract_points_from_line.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##nombre=number 10
##Output= output vector
library(sp)
# Coordinates of the first part of line feature number `nombre`.
Coord<-Layer@lines[[nombre]]@Lines[[1]]@coords
x<-as.numeric(Coord[,1])
y<-as.numeric(Coord[,2])
X<-cbind(x,y)
matrix<-as.matrix(X)
# Fix: inherit the CRS from the input layer instead of the hard-coded
# EPSG:2154 (RGF93 / Lambert-93), which silently mislabelled the output
# for any layer not in that French projection.
matrix<-SpatialPointsDataFrame(matrix, as.data.frame(Coord),proj4string=CRS(proj4string(Layer)))
Output=matrix
13 |
--------------------------------------------------------------------------------
/scripts/Read_file_content_into_string.py:
--------------------------------------------------------------------------------
##Utils=group
##Read file content into string=name
##Source_file=file
##output=output string

import os

# Read at most the first 10 MiB so huge files cannot exhaust memory.
with open(Source_file, 'r') as handle:
    content = handle.read(10485760)

progress.setInfo('First 1000 characters of the output string: %s' % content[:1000])

output = content
--------------------------------------------------------------------------------
/rscripts/Kriging.rsx.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "This script does a kriging with the best variogram which is selected with the function auto.Krige and gives a raster of the spatial interpolation. Fields can be numeric or not because in the script, there is a coding to numeric.", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector", "Field": "the field", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a raster of the kriging", "by": "the pixel size"}
--------------------------------------------------------------------------------
/rscripts/scatterplot_types.rsx:
--------------------------------------------------------------------------------
##Vector processing=group
##showplots
##Layer=vector
##X=Field Layer
##Y=Field Layer
##type=selection point;lines;point&lines
# Map the 0-based selection index to the base-graphics plot type:
# "p" = points, "l" = lines, "b" = both (points and lines).
plot_types <- c("p", "l", "b")
plot(Layer[[X]], Layer[[Y]], xlab=X, ylab=Y, type=plot_types[type + 1])
14 |
--------------------------------------------------------------------------------
/rscripts/Ripley_-_Rasson_spatial_domain.rsx:
--------------------------------------------------------------------------------
##Point pattern analysis=group
##Layer=vector point
##Output=output vector
library("spatstat")
library("maptools")
# Remember the input CRS so it can be restored on the result.
crs <- proj4string(Layer)
# Strip attributes, convert to a spatstat point pattern and compute the
# Ripley-Rasson estimate of the spatial domain the pattern came from.
pts_only <- as(Layer, "SpatialPoints")
domain <- ripras(as(pts_only, "ppp"))
polyg <- as(domain, "SpatialPolygons")
Output <- SpatialPolygonsDataFrame(polyg, data.frame(1))
proj4string(Output) <- crs
13 |
--------------------------------------------------------------------------------
/rscripts/ACP_cercle.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a Principal Component Analysis (PCA) with 4 fields and gives the circle of correlations.", "Field2": "The second field", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 4 fields.", "Field4": "The fourth field", "ALG_CREATOR": "JEANDENANS L.", "RPLOTS": "the plot of the map of a correlation circle", "ALG_HELP_CREATOR": "JEANDENANS L.", "null": ""}
--------------------------------------------------------------------------------
/rscripts/Tobler.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script is a Tobler\u2019s Hiking Function and gives a shapefile with the best way.", "y_depart": "coordinate Y of the starting point", "ALG_CREATOR": "JEANDENANS L.", "x_depart": "coordinate X of the starting point", "Layer": "a raster with elevation", "y_arrivee": "coordinate Y of the end point", "x_arrivee": "coordinate X of the end point", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a vector with the way"}
--------------------------------------------------------------------------------
/rscripts/Douglas-Peucker.rsx.help:
--------------------------------------------------------------------------------
{"distance": "degree of simplification: the limit distance between the simplified line and the points of the polygon.", "ALG_DESC": "This script is the algorithm of Douglas-Peucker and gives simplified lines.", "Output2": "a shapefile with the simplification of the outline", "Output1": "a shapefile with the simplification of the border", "Layer": "Input vector with polygons", "ALG_CREATOR": "JEANDENANS L.", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Ripley_-_Rasson_spatial_domain.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Victor Olaya, volayaf(at)gmail.com", "Output": "Riplay-Rasson estimate of the spatial domain.", "ALG_DESC": "This script computes the Ripley-Rasson estimate of the spatial domain from which an observed pattern of points came.\n\nR dependencies: library \"maptools\" and \"spatstat\"", "Layer": "A vector layer contain a point pattern.", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com"}
--------------------------------------------------------------------------------
/rscripts/AFDM.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The first field", "ALG_DESC": "This script does a Factor Analysis of Mixed Data (FAMD) and gives the circle of correlations.", "ALG_CREATOR": "JEANDENANS L.", "Field3 ": "The third field", "Layer": "Input vector with at least 6 fields.", "Field2 ": "The second field", "RPLOTS": "plot of the correlation circle", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field6 ": "The sixth field", "Field5 ": "The fifth field", "Field4 ": "The fourth field"}
--------------------------------------------------------------------------------
/rscripts/CAH.rsx.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "This script does a Hierarchical Cluster Analysis (HCA) and gives the dendrogram.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 5 fields.", "Field4": "The fourth field", "Field5": "The fifth field", "Field2": "The second field", "RPLOTS": "the plot of the dendrogram of the analysis", "ALG_HELP_CREATOR": "JEANDENANS L.", "method": "choice of the method of HCA."}
--------------------------------------------------------------------------------
/rscripts/Random_sampling_grid.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This scripts samples point location within a given polygon(s) using a random sampling method. The used methods assume that the geometry used is not spherical, so objects should be in planar coordinates.\n\n", "ALG_CREATOR": "Victor Olaya", "Layer": "A vector layer containing polygons.", "ALG_HELP_CREATOR": "Filipe Dias", "Output": "Randomly generated random points.", "Size": "Number of sample points to be randomly generated"}
--------------------------------------------------------------------------------
/scripts/Save_features_filtered_by_expression.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This scripts saves the data from a vector layer filtered by the given expression.\n", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Vector_layer": "The source vector layer.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "output": "The output vector layer corresponding to the matching features.", "Expression": "The expression to use as a filter. You can use any valid QGIS expression.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/frequency_plot.rsx:
--------------------------------------------------------------------------------
##Vector processing=group
##showplots
##Layer=vector
##Field=Field Layer
# Empirical cumulative distribution of the field, with median (red) and
# mean (green) marked as labelled vertical lines.
# Fix: the original chained base-graphics calls with ggplot-style "+";
# that only works by accident (the calls all return NULL). Base graphics
# are drawn by successive plain statements.
vals <- Layer[[Field]]
plot(ecdf(vals), verticals=T, pch=46, main="Frequency plot", xlab=Field)
abline(v=median(vals), col="red")
text(median(vals), 0.2, "median", col="red", srt=90, adj=c(-0.1, -0.1))
abline(v=mean(vals), col="green")
text(mean(vals), 0.2, "mean", col="green", srt=90, adj=c(2, -0.1))
10 |
--------------------------------------------------------------------------------
/rscripts/ACP_contribution.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a Principal Component Analysis (PCA) and gives the plot of the contribution of each field to an axis.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 4 fields.", "Field4": "The fourth field", "Field2": "The second field", "RPLOTS": "The plot of the contribution of fields to the construction of the axis of PCA.", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/scripts/Fill_holes.py.help:
--------------------------------------------------------------------------------
1 | {"Max_area": "maximum area size limit defining which holes should be filled", "Area": "Maximum area for holes in result", "ALG_CREATOR": "Chourmo - v1", "ALG_DESC": "Delete all holes (or \"rings\") in polygons or multipolygons up to a given area limit", "Results": "output polygon or multipolygon layer with filled holes", "ALG_VERSION": "1", "ALG_HELP_CREATOR": "Chourmo (and extended by Anita) - v1 ", "Polygons": "input polygon or multipolygon layer with holes"}
2 |
--------------------------------------------------------------------------------
/rscripts/Kriging_with_model_selection.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a kriging and we can select the model of the variogram; It gives a raster of the spatial interpolation. Fields can be numerics or not because in the script, there is a coding to numeric.", "Selection": "the list of differents models of variogram", "ALG_CREATOR": "JEANDENANS L.", "Layer": "Input vector", "Field": "The field", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a raster of the kriging", " by": "the pixel size"}
--------------------------------------------------------------------------------
/scripts/Read_file_content_into_string.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "output": "String containing the file content.", "ALG_DESC": "This script read the input file content and pass it as a string output.\n\nIt can then be used in another chained algorithm.\n\nNote : only the first 10 Mo of the file are passed to the output.", "Source_file": "The text file containing the data to read and pass to the output.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/scripts/make_landsat8_footprints.py.help:
--------------------------------------------------------------------------------
1 | {"url": "The gzipped CSV from https://aws.amazon.com/public-data-sets/landsat/", "ALG_DESC": "Make a polygon shapefile from the gzipped CSV provided at Amazon AWS", "output_layer": "The polygon shapefile output", "ALG_CREATOR": "Jelmer Oosthoek (jhpoosthoek@gispla.net)\nthanks to https://www.planet.com/pulse/planet-labs-celebrates-landsat-8-data-is-finally-available-on-amazon-web-services/", "ALG_HELP_CREATOR": "Jelmer Oosthoek (jhpoosthoek@gispla.net)"}
2 |
--------------------------------------------------------------------------------
/scripts/Hex_grid_from_layer_bounds.py:
--------------------------------------------------------------------------------
##Polygons=group
##input=vector
##cellsize=number 1000.0
##grid=output vector

# Hexagonal grid (type 3 in qgis:creategrid) covering the layer bounds,
# padded by half a cell on every side so boundary features are covered.
input = processing.getObject(input)
ext = input.extent()

# Fix: the original first assigned `extent = input.extent()` and then
# immediately overwrote it with the formatted string (dead store); it also
# re-called input.extent() four times. The extent is now fetched once.
extent = '%f,%f,%f,%f' % (ext.xMinimum() - cellsize / 2,
                          ext.xMaximum() + cellsize / 2,
                          ext.yMinimum() - cellsize / 2,
                          ext.yMaximum() + cellsize / 2)

processing.runalg('qgis:creategrid', 3, extent, cellsize, cellsize, input.crs().authid(), grid)
12 |
--------------------------------------------------------------------------------
/rscripts/Summarize_by_field.rsx:
--------------------------------------------------------------------------------
##Layer=vector
##Numeric_field=Field Layer
##Factor=Field Layer
##Folder=folder
library(doBy)
library(plyr)
library(foreign)
# Write the result into the user-chosen folder.
setwd(Folder)
# One row per factor level, with sum, mean and variance of the numeric field.
grouped<-data.frame(num=Layer[[Numeric_field]],f=Layer[[Factor]])
stats<-summaryBy(num~f, data=grouped,FUN=c(sum,mean,var))
# Restore user-facing column names before exporting to DBF.
renamed<-rename(stats,c("num.sum"="Sum","num.mean"="Mean","num.var"="Variance","f"=Factor))
write.dbf(renamed,file=paste("summarize_output", ".dbf", sep = ""))
13 |
--------------------------------------------------------------------------------
/rscripts/Simple_Linear_Regression.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script computes a Simple Linear Regression Y~X and gives plots of a regression linear:(1) Residuals vs Fitted, (2) Normal Q-Q, (3) Scale-Location, (4) Cook's distance", "ALG_CREATOR": "JEANDENANS L.", "Field1": "The first field.", "Layer": "Input vector with at least 2 fields.", "Field2": "The second field.", "RPLOTS": "plots of a regression linear:\n- Residuals vs Fitted\n- Normal Q-Q\n- Scale-Location\n- Cook's distance", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Summary_statistics.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This tool calculates the following summary statistics for a numeric field: (1) Sum, (2) Count, (3) Unique values, (4) Minimum value, (5) Maximum value, (6) Range, (7) Mean, (8) Median and (9) Standard deviation.\n\n", "R_CONSOLE_OUTPUT": "Summary statistics table", "ALG_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com ", "Layer": "Input vector with at least one numeric field", "Field": "Numeric field", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com "}
--------------------------------------------------------------------------------
/rscripts/ACP_cercle.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field1=Field Layer
##Field2=Field Layer
##Field3=Field Layer
##Field4=Field Layer
##showplots
library(ade4)
library(rpanel)
library(spatstat)
# Assemble the four analysis variables, dropping rows with missing values.
vars<-cbind(Layer[[Field1]], Layer[[Field2]], Layer[[Field3]], Layer[[Field4]])
vars<-as.data.frame(na.exclude(vars))
names(vars)<- c(Field1, Field2, Field3, Field4)
# Centred, scaled PCA; scannf=F suppresses the interactive scree-plot prompt.
pca<- dudi.pca(vars, center = T, scale = T, scannf = F)
# Correlation circle of the variables on the first two principal axes.
s.corcircle(pca$co)
--------------------------------------------------------------------------------
/rscripts/Advanced_raster_histogram.rsx:
--------------------------------------------------------------------------------
##Layer=raster
##Raster processing=group
##Dens_or_Hist=string Hist
##showplots
library(rpanel)
library(rasterVis)
# Plot mode chosen by the user: "Dens" (density plot) or "Hist" (histogram).
mode <- Dens_or_Hist
if (!(mode %in% c('Dens', 'Hist'))) {
  rp.messagebox('you must enter "Dens" or "Hist"', title = 'oops!')
} else {
  # Rebuild single-band input as a Raster object so the rasterVis
  # plotting functions accept it.
  if (nbands(Layer) == 1) {
    Layer <- raster(as.matrix(Layer))
  }
  if (mode == 'Dens') {
    densityplot(Layer)
  } else if (mode == 'Hist') {
    histogram(Layer)
  }
}
21 |
--------------------------------------------------------------------------------
/rscripts/scatterplot_log.rsx:
--------------------------------------------------------------------------------
##Vector processing=group
##showplots
##Layer=vector
##X=Field Layer
##Y=Field Layer
##type=selection No logarithm;logarithm X;logarithmY;logarithm X and Y
# Scatterplot of Y against X, with an optional log transform on either axis.
# Fix: the first branch was a stand-alone "if" outside the else-if chain,
# so the chain's head condition was re-evaluated separately (harmless today,
# but fragile and inconsistent with scatterplot_types.rsx). All four cases
# now form one exclusive chain.
if (type == 0) {
  plot(Layer[[X]], Layer[[Y]], xlab=X, ylab=Y)
} else if (type == 1) {
  plot(log(Layer[[X]]), Layer[[Y]], xlab=X, ylab=Y)
} else if (type == 2) {
  plot(Layer[[X]], log(Layer[[Y]]), xlab=X, ylab=Y)
} else if (type == 3) {
  plot(log(Layer[[X]]), log(Layer[[Y]]), xlab=X, ylab=Y)
}
17 |
--------------------------------------------------------------------------------
/rscripts/Douglas-Peucker_with_choice.rsx.help:
--------------------------------------------------------------------------------
{"distance": "degree of simplification: the limit distance between the simplified line and the points of the polygon.", "ALG_DESC": "This script is the algorithm of Douglas-Peucker with the choice of the polygon which is simplified and gives a shapefile with this polygon.", "ALG_CREATOR": "JEANDENANS L.", "Layer": "a set of data (polygons)", "numero": "the number of the polygon which we want to simplify", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a shapefile with the simplification of the polygon's border"}
--------------------------------------------------------------------------------
/rscripts/Alpha_shape.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##alpha=number 10
##Output= output vector
library(rgdal)
library(alphahull)
library(maptools)
# Alpha-shape of the layer's point coordinates.
shape = ashape(coordinates(Layer),alpha=alpha)
# Each edge row stores its two end points in columns 3:4 and 5:6;
# turn every edge into a two-point Line segment.
segments <- lapply(seq_len(nrow(shape$edges)), function(i) {
  Line(rbind(shape$edges[i, 3:4], shape$edges[i, 5:6]))
})
wrapped <- list(Lines(segments, as.character("1")))
sldf <- SpatialLinesDataFrame(SpatialLines(wrapped), data.frame(name ="ashape"), match.ID = FALSE)
Output=sldf
--------------------------------------------------------------------------------
/rscripts/Kernel_density_estimation.rsx.help:
--------------------------------------------------------------------------------
1 | {"Layer2": "Input vector of polygons on the same area.", "ALG_DESC": "This script computes the Kernel density estimation and gives a raster with estimations. We can choose if we want the correction for a possible frontier bias (method 1) or not (method 0).", "ALG_CREATOR": "JEANDENANS L.", "Layer1": "Input vector with field which we want to estimate.", "methode": "a number (1 : without correction or 0: with correction)", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a raster with the kernel density estimation"}
--------------------------------------------------------------------------------
/rscripts/Distance.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer1=vector
##Layer2=vector
##Field=Field Layer1
##output= output vector
library(geosphere)
library(rgeos)
# Merge all features of Layer2 into a single (multi)line geometry.
line<-gLineMerge(Layer2, byid=FALSE, id = NULL)
# Point coordinates of the Layer1 features.
x<-coordinates(Layer1)
# For each point: distance to `line` plus the coordinates of the nearest
# point on the line (columns 2:3 of the result).
# NOTE(review): distHaversine assumes lon/lat coordinates; if the layers
# are in a projected CRS the distances will be wrong -- confirm inputs.
X<-dist2Line(x, line, distfun=distHaversine)
# Use the nearest-point coordinates as the geometry of the output points.
matrix<-as.matrix(X[,2:3])
# Attach the chosen Layer1 attribute to the distance table.
X<-cbind(X, as.data.frame(Layer1[[Field]]))
result<-SpatialPointsDataFrame(matrix, as.data.frame(X, row.names=NULL))
# The output inherits the CRS of Layer1.
proj4string(Layer1)->crs
proj4string(result)<-crs
output<-result
--------------------------------------------------------------------------------
/scripts/Save_selected_features.py:
--------------------------------------------------------------------------------
##Vector=group
##input=vector
##output=output vector

from qgis.core import *
from processing.tools.vector import VectorWriter

# Copy every (selected) feature of the input layer into a new layer with
# an identical schema, geometry type and CRS.
layer = processing.getObject(input)
source = layer.dataProvider()

writer = VectorWriter(output, None, source.fields(),
                      source.geometryType(), layer.crs())

for feature in processing.features(layer):
    writer.addFeature(feature)

# Deleting the writer flushes buffers and closes the output file.
del writer
20 |
--------------------------------------------------------------------------------
/rscripts/ACP_individus.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a Principal Component Analysis (PCA) and gives a representation of persons.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 4 fields.", "Field4": "The fourth field", "Field2": "The second field", "RPLOTS": "The plot of persons of the PCA with two persons selected", "ALG_HELP_CREATOR": "JEANDENANS L.", "Individu2": "a number of a person who we want to see on the plot", "Individu1": "a number of a person who we want to see on the plot"}
--------------------------------------------------------------------------------
/rscripts/Multiple_Regression.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script does a multiple regression with 6 explicatives variables and gives the summary of the test.", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The second explicative field.", "Field1": "The explained field.", "Field6": "The fifth explicative field.", "Layer": "Input vector with at least 7 fields.", "Field4": "The third explicative field.", "Field5": "The fourth explicative field.", "Field2": "The first explicative field.", "Field7": "The sixth explicative field.", "ALG_HELP_CREATOR": "JEANDENANS L.", "null": ""}
2 |
--------------------------------------------------------------------------------
/scripts/ellipsoidal_area.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Calculates the area of each feature using the given ellipsoid. The area is added as a new attribute to the output layer.", "new_field": "Name of the area field to add", "ellipsoid": "Ellipsoid to use in area calculation. Defaults to WGS84. Accepts all PROJ.5 list of ellipsoids", "ALG_CREATOR": "Ujaval Gandhi\nujaval@google.com", "ALG_VERSION": "1.0", "ALG_HELP_CREATOR": "Ujaval Gandhi", "input": "Input polygon layer. Accepts both polygons and multipolygons", "units": "units for area calculation", "output": "Output polygon layer."}
2 |
--------------------------------------------------------------------------------
/rscripts/Summarize_by_two_fields.rsx:
--------------------------------------------------------------------------------
##Layer=vector
##Numeric_field=Field Layer
##Factor1=Field Layer
##Factor2=Field Layer
##Folder=folder
library(doBy)
library(plyr)
library(foreign)
# Write the result into the user-chosen folder.
setwd(Folder)
# One row per combination of the two factors, with sum, mean and variance
# of the numeric field.
grouped<-data.frame(num=Layer[[Numeric_field]],f1=Layer[[Factor1]],f2=Layer[[Factor2]])
stats<-summaryBy(num~f1+f2, data=grouped,FUN=c(sum,mean,var))
# Restore user-facing column names before exporting to DBF.
renamed<-rename(stats,c("num.sum"="Sum","num.mean"="Mean","num.var"="Variance","f1"=Factor1,"f2"=Factor2))
write.dbf(renamed,file=paste("summarize_output", ".dbf", sep = ""))
14 |
--------------------------------------------------------------------------------
/rscripts/barplots.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Creates a bar plot with vertical bars using barplot().",
2 |
3 | "ALG_CREATOR": "Alessandro Samuel-Rosa (alessandrosamuelrosa at gmail.com)",
4 |
5 | "ALG_HELP_CREATOR": "Alessandro Samuel-Rosa (alessandrosamuelrosa at gmail.com)",
6 |
7 | "ALG_VERSION": "1.0-0",
8 |
9 | "Layer": "Input point vector layer.",
10 |
11 | "Field": "Field of the attribute table of the point vector layer passed to Layer holding data of type String for plotting."
12 | }
13 |
--------------------------------------------------------------------------------
/scripts/spatial_cross_join_attributes.py.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "Iterates over all features of the cover layer, and gets features from the join layer which cross the cover feature's extent.\n\nFor each found pair of features, writes a row with the values of their attributes.\n\nIn other words, makes \nselect a.*, b.* from a, b where ST_Intersects(a.geom, b.geom) \n\nanalog and writes results as csv file.", "cover_layer_name": "", "ALG_CREATOR": "github: kiselev-dv", "ALG_VERSION": "1.0", "result": "CSV file with attribute values (\\t is separator, \" is escape symbol)", "join_layer_name": "", "ALG_HELP_CREATOR": ""}
2 |
--------------------------------------------------------------------------------
/rscripts/AFC.rsx.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "This script does a Correspondence Analysis (CA) and gives different plots: (1) a plot with the percentage of variance, (2) a plot of the factorial design, (3) a plot with the correlation circle", "ALG_CREATOR": "JEANDENANS L.", "Field3": "The third field", "Field1": "The first field", "Layer": "Input vector with at least 4 fields", "Field4": "The fourth field", "Field2": "The second field", "RPLOTS": "different plots:\n- a plot with the percentage of variance\n- a plot of the factorial design\n- a plot with the correlation circle", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Advanced_raster_histogram.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "", "ALG_DESC": "This algorithm generates a histogram or a density plot for the given raster. NOTE that you should not use this algorithm to process large rasters.\n\nR dependencies: rpanel, rasterVis. If you are using Linux you need to install \"tcktk\" and \"BWidget\" from your package master.", "Dens_or_Hist": "Use 'hist' to produce histogram of the raster values (separate plots for each band) and 'dens' if you want to create a density plot (single plot for all bands).", "RPLOTS": "Raster histogram.", "Layer": "A single- or multi-band raster."}
--------------------------------------------------------------------------------
/rscripts/Summary_statistics.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field=Field Layer
# Summary statistics of one numeric field, printed as a one-column table.
Summary_statistics<-data.frame(rbind(sum(Layer[[Field]]),
length(Layer[[Field]]),
length(unique(Layer[[Field]])),
min(Layer[[Field]]),
max(Layer[[Field]]),
max(Layer[[Field]])-min(Layer[[Field]]),
mean(Layer[[Field]]),
median(Layer[[Field]]),
sd(Layer[[Field]])),row.names=c("Sum:","Count:","Unique values:","Minimum value:","Maximum value:","Range:","Mean value:","Median value:","Standard deviation:"))
colnames(Summary_statistics)<-c(Field)
# Fix: the line below carried a stray leading ">" (a pasted console prompt),
# which is a syntax error in R. Auto-printing the data frame sends the table
# to the script's console output.
Summary_statistics
15 |
--------------------------------------------------------------------------------
/help_converter.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import pickle
3 | import json
4 |
def convertHelp(filePath):
    """Convert a pickled .help file to JSON, in place.

    Files that cannot be unpickled (typically because they were already
    converted to JSON) are left untouched.

    Fixes over the original:
    - the file is fully read and closed before being rewritten, instead of
      truncating it for writing while the read handle was still open;
    - json.dumps() runs *before* the file is truncated, so a serialization
      failure can no longer destroy the file's contents;
    - the write handle no longer shadows the read handle's name.
    """
    with open(filePath, 'rb') as src:
        raw = src.read()
    try:
        descriptions = pickle.loads(raw)
        text = json.dumps(descriptions)
    except Exception:
        # Best-effort by design: not a pickle (or not serializable) -> skip.
        return
    with open(filePath, 'w') as dst:
        dst.write(text)
13 |
# Convert every .help file shipped with the scripts, R scripts and models,
# in that order.
for pattern in ('scripts/*.help', 'rscripts/*.help', 'models/*.help'):
    for filename in glob.glob(pattern):
        convertHelp(filename)
22 |
--------------------------------------------------------------------------------
/rscripts/Quadrat_analysis.rsx.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "The script divides the window into quadrats and counts the numbers of points in each quadrat. Then it performs a test of Complete Spatial Randomness based on the quadrat counts.\n\nR dependencies: library \"maptools\" and \"spatstat\"", "R_CONSOLE_OUTPUT": "The results of the Chi-squared test of complete spatial randomness (CSR) using quadrat counts.\n", "ALG_CREATOR": "Victor Olaya, volayaf(at)gmail.com", "Layer": "A vector containing a point pattern.", "RPLOTS": "A display containing the number of points per quadrat.", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com"}
--------------------------------------------------------------------------------
/rscripts/ACP_contribution.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field1=Field Layer
##Field2=Field Layer
##Field3=Field Layer
##Field4=Field Layer
##showplots
library(ade4)
library(rpanel)
library(spatstat)
# Assemble the four analysis variables, dropping rows with missing values.
vars<-cbind(Layer[[Field1]], Layer[[Field2]], Layer[[Field3]], Layer[[Field4]])
vars<-as.data.frame(na.exclude(vars))
names(vars)<- c(Field1, Field2, Field3, Field4)
# Centred, scaled PCA; scannf=F suppresses the interactive scree-plot prompt.
pca<- dudi.pca(vars, center = T, scale = T, scannf = F)
# Absolute contribution of each variable to the principal axes, labelled
# with the variable names.
contrib<-inertia.dudi(pca,col.inertia = T)$col.abs
plot(contrib)
text(contrib ,row.names(contrib))
19 |
--------------------------------------------------------------------------------
/scripts/Create_tiling_from_vector_layer.py:
--------------------------------------------------------------------------------
1 | ##Polygons=group
2 | ##input=vector
3 | ##numpolygons=number 10
4 | ##polygons=output vector
5 |
6 | input = processing.getObject(input)
7 |
8 | centerx = (input.extent().xMinimum() + input.extent().xMaximum()) / 2
9 | centery = (input.extent().yMinimum() + input.extent().yMaximum()) / 2
10 | width = input.extent().xMaximum() - input.extent().xMinimum()
11 | cellsize = width / numpolygons
12 | height = input.extent().yMaximum() - input.extent().yMinimum()
13 |
14 | processing.runalg('qgis:creategrid', cellsize, height, width, height,
15 | centerx, centery, 1, input.crs(), polygons)
16 |
--------------------------------------------------------------------------------
/rscripts/ternaryPlots.rsx:
--------------------------------------------------------------------------------
1 | ##ggplot=group
2 | ##showplots
3 | ##Layer=Vector
4 | ##X= Field Layer
5 | ##Y=Field Layer
6 | ##Z=Field Layer
7 | ##Group=optional Field Layer
8 | library("ggplot2")
9 | library("ggtern")
10 | if (is.null(Group)){
11 | ggplot()+
12 | geom_point(aes(Layer[[X]],Layer[[Y]], Layer[[Z]]))+
13 | coord_tern() +
14 | xlab(X)+
15 | ylab(Y) +
16 | zlab(Z) +
17 | theme_showarrows()
18 | } else {
19 | ggplot()+
20 | geom_point(aes(Layer[[X]],Layer[[Y]], Layer[[Z]], color=as.factor(Layer[[Group]])))+
21 | coord_tern() +
22 | xlab(X)+
23 | ylab(Y) +
24 | zlab(Z) +
25 | theme_showarrows()+
26 | theme(legend.title=element_blank())
27 | }
28 |
--------------------------------------------------------------------------------
/rscripts/AFDM.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1 = Field Layer
4 | ##Field2 = Field Layer
5 | ##Field3 = Field Layer
6 | ##Field4 = Field Layer
7 | ##Field5 = Field Layer
8 | ##Field6 = Field Layer
9 | ##showplots
10 | library(FactoMineR)
11 | library(ade4)
12 | Layer<-as.data.frame(Layer)
13 | donne<-cbind(Layer[[Field1]],Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]],Layer[[Field6]])
14 | colnames(donne)<-c(Field1, Field2, Field3,Field4,Field5,Field6)
15 | X<-FAMD (donne, ncp = 5, graph =FALSE, sup.var = NULL,
16 | ind.sup = NULL, axes = c(1,2), row.w = NULL, tab.comp = NULL)
17 | s.corcircle(X$var$coord[,1:2])
18 |
--------------------------------------------------------------------------------
/rscripts/Selection_with_criterion_choice.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The explained field", "ALG_DESC": "This script does a selection of variables. We can choose the model of selection between: exhaustive serach, forward or backward stepwise, or sequential replacement. And we can select the criterion used in the selection.", "ALG_CREATOR": "JEANDENANS L.", "Criteres": "a list of criterions", "Layer": "Input vector.", " Field1 ": "the variable which is explained by the others", "nbr_var ": "a number which is the number of variables which we want in the best model", "Methode": "list of methods", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field2 ": "The field which be used to identify the objects."}
--------------------------------------------------------------------------------
/scripts/Define_1_raster_layer_properties.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Define some properties for a single raster : style, CRS, etc.", "Coordinate_Reference_System": "Coordinate Reference System (CRS) to set for the raster layer.\n\nNote: this will not reproject the layer data.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "QML_file": "[optionnal] If needed, choose the QML style file to apply to the raster.", "Raster_layer": "The input raster layer.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Refresh_contrast_enhancement": "Refresh the contrast enhancement for the raster band.", "Save_layer_style_as_default": "If true, the layer style will be save as the default style.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/scripts/Points_from_vector.py:
--------------------------------------------------------------------------------
1 | ##Points=group
2 | ##Input_raster=raster
3 | ##Input_vector=vector
4 | ##Output_layer=output vector
5 |
6 | from qgis.core import *
7 |
8 | vector = processing.getObject(Input_vector)
9 | raster = processing.getObject(Input_raster)
10 |
11 | geometryType = vector.geometryType()
12 | if geometryType == QGis.Point:
13 | processing.runalg('qgis:saveselectedfeatures', vector, Output_layer)
14 | elif geometryType == QGis.Line:
15 | processing.runalg('qgis:generatepointspixelcentroidsalongline', raster, vector, Output_layer)
16 | elif geometryType == QGis.Polygon:
17 | processing.runalg('qgis:generatepointspixelcentroidsinsidepolygons', raster, vector, Output_layer)
18 |
--------------------------------------------------------------------------------
/rscripts/A-star.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script is the algorithm A-star and gives a raster with the best way. We choose the limit of elevation in order to put obstacles and we have to choose the number of directions between 8 and 4 (with or not diagonals) .", "Layer0": "a raster of the study area.", "ALG_CREATOR": "JEANDENANS L.", "xA": "coordinate X of the starting point", "direction": "number of directions between 4 or 8", "xB": "coordinate X of the end point", "yA": "coordinate Y of the starting point", "ALG_HELP_CREATOR": "JEANDENANS L.", "Output": "a raster of the same area with the way", "yB": "coordinate Y of the end point", "hauteur": "the limit of elevation. Coordinates with a higher elevation become an obstacle."}
--------------------------------------------------------------------------------
/rscripts/ACP_var.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##Field2=Field Layer
5 | ##Field3=Field Layer
6 | ##Field4=Field Layer
7 | ##showplots
8 | library(ade4)
9 | library(rpanel)
10 | library(spatstat)
11 | donne<-cbind(Layer[[Field1]], Layer[[Field2]], Layer[[Field3]], Layer[[Field4]])
12 | donne<-na.exclude(donne)
13 | donne<-as.data.frame(donne)
14 | names(donne)<- c(Field1, Field2, Field3, Field4)
15 | acp<- dudi.pca(donne, center = T, scale = T, scannf = F)
16 | cl1<-acp$li[,1]
17 | cc1<-acp$co[,1]
18 | cl2<-acp$li[,2]
19 | cc2<-acp$co[,2]
20 | plot(cc1,cc2,type="n", main="Les variables", xlim=c(-1,1), ylim=c(-1,1), asp=1, ylab= "",xlab= "")
21 | abline(h=0,v=0)
22 | text(cc1,cc2,row.names(acp$co))
--------------------------------------------------------------------------------
/rscripts/Autocor_spatiale.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field=Field Layer
4 | ##nb_simul=number 100
5 | ##nb_vois=number 10
6 | ##showplots
7 | library(spdep)
8 | coords<-coordinates(Layer)
9 | Y<-knearneigh(coords, k=nb_vois,longlat = T)
10 | Y<-knn2nb(Y)
11 | X<-nb2listw(Y, style="B", zero.policy=T)
12 | if (class(Layer[[Field]])=="factor"){
13 | Layer[[Field]]<-as.numeric(Layer[[Field]])
14 | }
15 | if(class(Layer[[Field]])=="character"){
16 | Layer[[Field]]<-as.factor(as.numeric(Layer[[Field]]))
17 | }
18 | moran.test(x = Layer[[Field]], listw = X)
19 | test<-moran.mc(x = Layer[[Field]], listw = X,nsim=nb_simul)
20 | test
21 | hist(test$res, freq=TRUE, breaks=20, xlab="Simulatec Moran's I")
22 | abline(v=0,col="red")
--------------------------------------------------------------------------------
/rscripts/Selection_with_r2.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1 = Field Layer
4 | ##Field2 = Field Layer
5 | ##Field3 = Field Layer
6 | ##Field4 = Field Layer
7 | ##Field5 = Field Layer
8 | ##Field6 = Field Layer
9 | ##Methode=Selectionexhaustive;backward;forward;seqrep
10 | ##Nombre_var = number 5
11 | ##showplots
12 | library(leaps)
13 | X<-cbind(Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]],Layer[[Field6]])
14 | colnames(X)<-c(Field2, Field3, Field4, Field5, Field6)
15 | method=c("exhaustive", "backward", "forward", "seqrep")
16 | methode<-method[Methode + 1]
17 | test1<-regsubsets(Layer[[Field1]]~X,data=Layer,really.big=T,nbest=1,nvmax=Nombre_var, intercept=F,method=methode)
18 | plot(test1, scale="r2")
19 |
--------------------------------------------------------------------------------
/scripts/Create_vector_layer_from_SQL_Query.py.help:
--------------------------------------------------------------------------------
1 | {"Unique_id_field_name": "Name of the field containing the unique ID.", "Geometry_field_name": "Name of the field containing the geometry.", "Avoid_select_by_id": "-", "ALG_DESC": "This script runs any query on a PostGis or Spatialite connection, and returns a vector layer.", "Connection_name": "Name of the connection as written in QGIS database tools.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "null": "", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Query": "The complete SQL query to run.\n\nThis can be any query (for example with more than one table).", "output": "Ouput vector layer containing the result of the query.", "Database_type": "Type of the database : PostGIS or Spatialite.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/scripts/Set_multiple_raster_layers_properties.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script will set some properties to the input raster layers : style, CRS, etc.", "Coordinate_Reference_System": "Coordinate Reference System (CRS) to set for each raster layer.\n\nNote: this will not reproject the layers data.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "QML_file": "[optionnal] If needed, choose the QML style file to apply to the rasters.", "Refresh_contrast_enhancement": "Refresh the contrast enhancement for the raster band.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Raster_layers": "The raster layers for which to apply the parameters.", "Save_layer_style_as_default": "If true, save the raster properties as the default style for each raster layer.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/Selection_Cp.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1 = Field Layer
4 | ##Field2 = Field Layer
5 | ##Field3 = Field Layer
6 | ##Field4 = Field Layer
7 | ##Field5 = Field Layer
8 | ##Field6 = Field Layer
9 | ##Methode=Selectionexhaustive;backward;forward;seqrep
10 | ##Nombre_var = number 5
11 | ##showplots
12 | library(leaps)
13 | X<-cbind(Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]],Layer[[Field6]])
14 | colnames(X)<-c(Field2, Field3, Field4, Field5, Field6)
15 | method=c("exhaustive", "backward", "forward", "seqrep")
16 | methode<-method[Methode + 1]
17 | test1<-regsubsets(Layer[[Field1]]~X,data=Layer,really.big=T,nbest=1,nvmax=Nombre_var, intercept=F,method=methode)
18 | plot(test1, scale="Cp")
19 | summary(test1)
20 |
--------------------------------------------------------------------------------
/rscripts/Selection_with_r2_adjusted.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1 = Field Layer
4 | ##Field2 = Field Layer
5 | ##Field3 = Field Layer
6 | ##Field4 = Field Layer
7 | ##Field5 = Field Layer
8 | ##Field6 = Field Layer
9 | ##Methode=Selectionexhaustive;backward;forward;seqrep
10 | ##Nombre_var = number 5
11 | ##showplots
12 | library(leaps)
13 | X<-cbind(Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]],Layer[[Field6]])
14 | colnames(X)<-c(Field2, Field3, Field4, Field5, Field6)
15 | method=c("exhaustive", "backward", "forward", "seqrep")
16 | methode<-method[Methode + 1]
17 | test1<-regsubsets(Layer[[Field1]]~X,data=Layer,really.big=T,nbest=1,nvmax=Nombre_var, intercept=F,method=methode)
18 | plot(test1, scale="adjr2")
19 |
--------------------------------------------------------------------------------
/scripts/EquivalentNumField.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Adds a new NUM_FIELD attribute to a vector layer that contains a unique integer value for each different value of a specified attribute in the original layer.\n\nCertain algorithms can only work with numerical values, so this process will by converting strings or other attributes into integers.", "ALG_CREATOR": "Benjamin Trigona-Harany ", "layer": "Input vector layer", "Equivalent_numerical_field_table": "Table that relates the input values of the original fieldname and the new NUM_FIELD values.", "fieldname": "Attribute that will be used to generate unique integer values.", "ALG_VERSION": "1.0", "ALG_HELP_CREATOR": "", "Equivalent_numerical_field_layer": "New layer with the added NUM_FIELD attribute."}
2 |
--------------------------------------------------------------------------------
/rscripts/Selection_with_Bayesian_Information_Criterion.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1 = Field Layer
4 | ##Field2 = Field Layer
5 | ##Field3 = Field Layer
6 | ##Field4 = Field Layer
7 | ##Field5 = Field Layer
8 | ##Field6 = Field Layer
9 | ##Methode=Selectionexhaustive;backward;forward;seqrep
10 | ##Nombre_var = number 5
11 | ##showplots
12 | library(leaps)
13 | X<-cbind(Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]],Layer[[Field6]])
14 | colnames(X)<-c(Field2, Field3, Field4, Field5, Field6)
15 | method=c("exhaustive", "backward", "forward", "seqrep")
16 | methode<-method[Methode + 1]
17 | test1<-regsubsets(Layer[[Field1]]~X,data=Layer,really.big=T,nbest=1,nvmax=Nombre_var, intercept=F, method=methode)
18 | plot(test1, scale="bic")
--------------------------------------------------------------------------------
/rscripts/CAH.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##Field2=Field Layer
5 | ##Field3=Field Layer
6 | ##Field4=Field Layer
7 | ##Field5=Field Layer
8 | ##method= Selection ward;average;single;complete;ward
9 | ##showplots
10 | library(cluster)
11 | Data<-cbind(Layer[[Field1]],Layer[[Field2]],Layer[[Field3]],Layer[[Field4]],Layer[[Field5]])
12 | methodes <-c("ward","average","single","complete","ward")
13 | methode<-methodes[method+1]
14 | methode
15 | cahCSP <- agnes(Data, metric = "euclidean", method = methode)
16 | sortedHeight <- sort(cahCSP$height, decreasing = TRUE)
17 | par(mfrow=c(2,1))
18 | plot(sortedHeight, type = "h", xlab = "Noeuds", ylab = "Niveau d'agregation")
19 | dendroCSP <- as.dendrogram(cahCSP)
20 | plot(dendroCSP, leaflab = "none")
--------------------------------------------------------------------------------
/rscripts/Relative_distribution_(distance_covariate).rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##Layer=vector point
3 | ##Covariate=vector
4 | ##Covariate_name=string mandatory_covariate_name_(no_spaces)
5 | ##x_label=string
6 | ##Plot_name=string
7 | ##Legend_position=string float
8 | library(spatstat)
9 | library(maptools)
10 | library(rpanel)
11 | if (Covariate_name == "") {
12 | rp.messagebox('Covariate name must not be emply!', title = 'oops!')}
13 | else {
14 | S <- Layer
15 | SP <- as(S, "SpatialPoints")
16 | P <- as(SP, "ppp")
17 | r <- Covariate
18 | rp <- as.psp(r)
19 | rdist <- distfun(rp)
20 | plot(rhohat(P, rdist, covname=Covariate_name), xlab= x_label,
21 | legendpos = Legend_position,
22 | legendargs=list(bg="transparent"),
23 | main = Plot_name)
24 | ##showplots
25 | }
26 |
--------------------------------------------------------------------------------
/scripts/Generate_Unique_values_renderer.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script will extract unique values from a raster and then use them to generate a single band Pseudocolour style - useful for categorical / classified rasters such as land use.", "ALG_CREATOR": "Andy Harfoot\n2017\nGPLv3", "Raster_to_extract_unique_values": "The raster to extract unique values from and then apply the style to. The raster needs to be loaded into the QGIS canvas", "round_values_to_ndigits": "Optional. If you want values in raster to be rounded before counting, enter ndigits value here. Negative values in this field are accepted and cell values will be rounded to ndigits before decimal point.", "result": "Html file that will have the table with unique cell values and their counts.", "ALG_HELP_CREATOR": "Andy Harfoot\n2017\n"}
--------------------------------------------------------------------------------
/rscripts/raster-attribute-table.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Create and add an attribute table to a raster layer.",
2 |
3 | "ALG_CREATOR": "Alessandro Samuel-Rosa (alessandrosamuelrosa at gmail.com)",
4 |
5 | "ALG_HELP_CREATOR": "Alessandro Samuel-Rosa (alessandrosamuelrosa at gmail.com)",
6 |
7 | "ALG_VERSION": "1.0-0",
8 |
9 | "Layer": "An input raster layer.",
10 |
11 | "Levels": "An optional string with the names that you want to be attributed to the levels of the raster passed to Layer. Level names must be separated by a single space, i.e. level1 level2 level3. If left NULL, then level names will be identical to the integer level values.",
12 |
13 | "Output": "An output raster layer."
14 | }
15 |
--------------------------------------------------------------------------------
/scripts/Contour.py.help:
--------------------------------------------------------------------------------
1 | {"Value_field": "Value for contour processing", "ALG_DESC": "Make polygons on contours from a point vector file. Contours can be optionnaly grouped by a field ", "Group_Field": "Field to sub-categorize points, one category for each unique value of this field", "ALG_CREATOR": "Chourmo - v1\nbased on the contour plugin from Chris Crook : https://github.com/ccrook/QGIS-Contour-Plugin.git", "Points": "Point layer", "Results": "Polygon vector file, with min and max levels fields, value field with the same name as the input field and a group field if used", "Levels": "List of levels, separated by a semi-colon. One contour will be made for each interval", "Group_by_field": "Field to sub-categorize points, one category for each unique value of this field", "ALG_HELP_CREATOR": "Chourmo - v1"}
--------------------------------------------------------------------------------
/scripts/Square_grid_from_layer_extent.py:
--------------------------------------------------------------------------------
1 | ##Vector=group
2 | ##input=vector
3 | ##cellsize=number 1000.0
4 | ##grid=output vector
5 |
6 | input = processing.getObject(input)
7 |
8 | centerx = (input.extent().xMinimum() + input.extent().xMaximum()) / 2
9 | centery = (input.extent().yMinimum() + input.extent().yMaximum()) / 2
10 | width = max((input.extent().xMaximum() - input.extent().xMinimum()), cellsize)
11 | height = width
12 |
13 | #http://docs.qgis.org/2.6/en/docs/user_manual/processing_algs/qgis/vector_creation_tools/creategrid.html
14 | #processing.runalg('qgis:creategrid', type, width, height, hspacing, vspacing, centerx, centery, crs, output)
15 | processing.runalg('qgis:creategrid', 1, width, height, cellsize, cellsize,
16 | centerx, centery, input.crs().authid(), grid)
17 |
--------------------------------------------------------------------------------
/rscripts/Selection_with_r2.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The explained field", "ALG_DESC": "This script does a selection of variables with 6 fields and with the criterion r2. And we can choose the model between: exhaustive serach, forward or backward stepwise, or sequential replacement.\nIt gives the plot of the selection with the criterion r2", "Nombre_var ": "a number which is the number of variables which we want in the best model", "Field3 ": "The second explicative field", "Layer": "Input vector", "Field2 ": "The first explicative field", "ALG_CREATOR": "JEANDENANS L.", "Methode": "a list of methods", "RPLOTS": "the plot of the selection with the criterion r2", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field6 ": "The fifth explicative field", "Field5 ": "The fourth explicative field", "Field4 ": "The third explicative field"}
--------------------------------------------------------------------------------
/scripts/Merge_all_lines_in_layer.py:
--------------------------------------------------------------------------------
1 | # Written by Alexander Muriy with the Google help :)
2 | # (Institute Of Environmental Geoscience, Moscow, Russia)
3 | # amuriy AT gmail DOT com
4 |
5 | ##Merge all lines in layer=name
6 | ##Input_lines=vector
7 | ##Merged_lines=output vector
8 |
9 | from qgis.core import *
10 | from PyQt4.QtCore import *
11 | from processing.tools.vector import VectorWriter
12 |
13 | inputLayer = processing.getObject(Input_lines)
14 | writer = VectorWriter(Merged_lines, None, '', QGis.WKBLineString, inputLayer.crs())
15 | geoms = QgsGeometry.fromWkt('GEOMETRYCOLLECTION EMPTY')
16 | for feature in inputLayer.getFeatures():
17 | geoms = geoms.combine(feature.geometry())
18 |
19 | fet = QgsFeature()
20 | fet.setGeometry(geoms)
21 | writer.addFeature(fet)
22 |
23 | del writer
24 |
--------------------------------------------------------------------------------
/rscripts/Close_neighbor.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##Field1=Field Layer
4 | ##distance= number 10
5 | ##Output= output vector
6 | library(spdep)
7 | library(sp)
8 | coordi <- as.matrix(coordinates(Layer))
9 | tram_nb <- dnearneigh(coordi, d1 = 0, d2 = distance, row.names=Layer[[Field1]])
10 | tram_nb<-as.data.frame(card(tram_nb))
11 | tram<-cbind(as.vector(Layer[[Field1]]), tram_nb)
12 | Coord<-cbind()
13 | n<-length(Layer[[Field1]])
14 | for(i in 1:n){
15 | if (tram[i,2]!=0 ){ Coord <-rbind(Coord,cbind(coordi[i,1], coordi[i,2],as.vector(Layer[[Field1]])[i]))
16 | }
17 | }
18 | x<-as.numeric(Coord[,1])
19 | y<-as.numeric(Coord[,2])
20 | X<-cbind(x,y)
21 | matrix<-as.matrix(X)
22 | matrix<-SpatialPointsDataFrame(matrix, as.data.frame(Coord),proj4string=CRS("+init=epsg:2154"))
23 | Output=matrix
--------------------------------------------------------------------------------
/scripts/Create_vector_layer_from_Postgis_table.py.help:
--------------------------------------------------------------------------------
1 | {"Unique_id_field_name": "Field containing the unique ID", "ALG_DESC": "This script gets the data from a PostGis table and returns a vector layer.", "Database": "Database name", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Where_clause": "Optional WHERE clause which lets you filter the data to return", "User": "User used to connect to the database", "Host": "Database host (IP adress or domain)", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "output": "Output vector layer with the same columns as the original table.\nThe field names may be shorter than the original ones.", "Table": "Table name", "Geometry_column": "Field containing the geometry", "Password": "Password for the chosen user", "Port": "Database port (default value: 5432 )", "Schema": "Schema name", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/Selection_with_Cp.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The explained field", "ALG_DESC": "This script does a selection of variables with 6 fields and with the citerion: Mallows's Cp. And we can choose the model between: exhaustive serach, forward or backward stepwise, or sequential replacement.\n\nIt gaves the plot of the selection with Mallows's Cp.", "Nombre_var ": "a number which is the number of variables which we want in the best model", "Field3 ": "The second explicative field", "Layer": "Input vector with at least 6 fields.", "Field2 ": "The first explicative field", "ALG_CREATOR": "JEANDENANS L.", "Methode": "a list of methods", "RPLOTS": "the plot of the selection with the citerion: Mallows's Cp", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field6 ": "The fifth explicative field", "Field5 ": "The fourth explicative field", "Field4 ": "The third explicative field"}
--------------------------------------------------------------------------------
/rscripts/Selection_with_r2_adjusted.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The explained field", "ALG_DESC": "This script does a selection of variables with 6 fields and with the criterion r2 adjusted. And we can choose the model between: exhaustive serach, forward or backward stepwise, or sequential replacement.\nIt gives the plot of the selection with the criterion r2 adjusted", "ALG_CREATOR": "JEANDENANS L.", "Field3 ": "The second explicative field", "Layer": "Input vector with at least 6 fields.", "Field2 ": "The first explicative field", "Nombre_var ": "a number which is the number of variables which we want in the best model", "Methode": "a list of methods", "RPLOTS": "the plot of the selection with the criterion r2 adjusted", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field6 ": "The fifth explicative field", "Field5 ": "The fourth explicative field", "Field4 ": "The third explicative field"}
--------------------------------------------------------------------------------
/rscripts/Inverse_Distance_Weigthing.rsx.help:
--------------------------------------------------------------------------------
1 | {"Layer2": "Input raster of the area.", "ALG_DESC": "This script computes an Inverse Distance Weigthing (IDW) with differents methods (parameters are predefinied) and gives a shapefile with coordinates of points and values of the ponderation.\nThe differents methods of ponderation are:\n(0) w(d)=1/(d^p)\n(1) w(d)=1/(1+d^p)\n(2) w(d)=(1-nugget)/(1+(range/d)^2)\n(3) w(d)=(1-nugget)*exp(-range/d)\n(4) w(d)=(1-nugget)*[1-1.5*(range/d)+0.5*(range/d)^3]\n(5) w(d)=1-\u222bp(t)dt, avec p(t) the density of the normal distribution\n(6) w(d)=1/(1+exp(-alpha)), avec alpha=a*d+b (a>0)\n\n", "ALG_CREATOR": "JEANDENANS L.", "Layer1": "Input vector with points", "ALG_HELP_CREATOR": "JEANDENANS L.", "ponderation": "number between 0 and 7 to select the method of ponderation", "output": "a shapefile with coordinates of points and values of the ponderation"}
--------------------------------------------------------------------------------
/scripts/Define_vector_layer_properties.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script apply some properties to the input vector layers : style, CRS, Spatial index, extent, etc.", "QML_file": "If given, the QML style file will be applied to the vector layer.", "Coordinate_Reference_System": "Coordinate Reference System (CRS) to set for the vector layers.\n\nNote: this will not reproject the layer data.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Vector_layers": "The input vector layers.", "Layer_abstract": "", "Layer_title": "", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Create_spatial_index": "If true, QGIS will calcultate the spatial index for the layer (if the provider offers this capability).", "Save_layer_style_as_default": "If true, the layer properties will be saved as the default layer style.", "Calculate_extent": "If true, the layer extent will be recalculated.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/raster-attribute-table.rsx:
--------------------------------------------------------------------------------
1 | ##Raster processing=group
2 | ##Create raster attribute table=name
3 | ##Layer=raster
4 | ##Levels=string NULL
5 | ##Output=output raster
6 |
7 | # Load necessary libraries ----
8 | library(raster)
9 | library(stringr)
10 |
11 | # Turn raster layer into factor ----
12 | Layer <- as.factor(Layer[[1]])
13 |
14 | # Add raster attribute table ---
15 | rat <- levels(Layer[[1]])[[1]]
16 | if (Levels == "NULL") {
17 | rat$tmp <- rat$ID
18 | } else {
19 | tmp <- c(stringr::str_split_fixed(string = Levels, pattern = " ", n = Inf))
20 | if (length(tmp) != length(rat$ID)) {
21 | stop (paste("'Levels' does not match the number of levels in Layer (", length(rat$ID), ")", sep = ""))
22 | }
23 | rat$tmp <- tmp
24 | }
25 | colnames(rat)[2] <- names(Layer)
26 | levels(Layer)[[1]] <- rat
27 |
28 | # Output ----
29 | Output <- Layer
30 | Output
31 |
--------------------------------------------------------------------------------
/rscripts/Relative_distribution_(raster_covariate).rsx:
--------------------------------------------------------------------------------
1 | ##Point pattern analysis=group
2 | ##points=vector point
3 | ##covariate=raster
4 | ##covariate_name=string mandatory_covariate_name_(no_spaces)
5 | ##x_label=string
6 | ##plot_name=string
7 | ##legend_position=string float
8 | ##showplots
9 | library(geostatsp)
10 | library(maptools)
11 | library(rpanel)
12 | if (covariate_name == "") {
13 | rp.messagebox('"covariate name" must not be empty!', title = 'oops!')
14 | }
15 | else {
16 | S <- points
17 | SP <- as(S, "SpatialPoints")
18 | P <- as(SP, "ppp")
19 | covariate <- raster(covariate, layer = 1)
20 | covariate <- as.im(covariate)
21 | library(spatstat)
22 | S <- points
23 | SP <- as(S, "SpatialPoints")
24 | P <- as(SP, "ppp")
25 | plot(rhohat(P, covariate, covname=covariate_name), xlab= x_label,
26 | legendpos = legend_position,
27 | legendargs=list(bg="transparent"),
28 | main = plot_name)
29 | }
30 |
--------------------------------------------------------------------------------
/scripts/Define_1_vector_layer_properties.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script set some properties to the input vector layer : style, spatial index, extent, title, abstract, etc.", "QML_file": "If given, the QML style file will be applied to the vector layer.", "Coordinate_Reference_System": "Coordinate Reference System (CRS) to set for the vector layer.\n\nNote: this will not reproject the layer data.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Vector_layer": "The input vector layer.", "Layer_abstract": "The layer abstract to set.", "Layer_title": "The layer title to set.", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN - 3liz", "Create_spatial_index": "If true, QGIS will calcultate the spatial index for the layer (if the provider offers this capability).", "Save_layer_style_as_default": "If true, save the layer properties as the default layer style.", "Calculate_extent": "If true, the layer extent will be recalculated.", "ALG_VERSION" : 1}
--------------------------------------------------------------------------------
/rscripts/Selection_with_Bayesian_Information_Criterion.rsx.help:
--------------------------------------------------------------------------------
1 | {"Field1 ": "The explained field", "ALG_DESC": "This script does a selection of variables with 6 fields and with the bayesian information criterion (BIC). And we can choose the model between: exhaustive serach, forward or backward stepwise, or sequential replacement.\nIt gives the plot of the selection with the bayesian information criterion (BIC).", "Nombre_var ": "a number which is the number of variables which we want in the best model", "Field3 ": "The second explicative field", "Layer": "Input vector with at least 6 fields.", "Field2 ": "The first explicative field", "ALG_CREATOR": "JEANDENANS L.", "Methode": "a list of methods", "RPLOTS": "the plot of the selection with the bayesian information criterion (BIC)", "ALG_HELP_CREATOR": "JEANDENANS L.", "Field6 ": "The fifth explicative field", "Field5 ": "The fourth explicative field", "Field4 ": "The third explicative field"}
--------------------------------------------------------------------------------
/rscripts/Minimum_convex_polygon.rsx.help:
--------------------------------------------------------------------------------
1 | {"Layer": "An vector layer containing the relocations of one or more animals.", "Home_ranges": "The home ranges of the animals calculated according to the selected \"Percentage\" parameter.", "ALG_CREATOR": "This algorithm was written by Filipe S. Dias using the functions written by Clement Calenge, creator of the package \"adehabitatHR\".", "ALG_DESC": "This tool computes the home range of one or more animals with the Minimum Convex Polygon estimator.\n\nR depencies: library \"adehabitatHR\" ", "Field": "A field containing a unique identifier for each animal (type \"string\").\n\n\n", "ALG_HELP_CREATOR": "Filipe S. Dias", "null": "", "Output": "The ouput is a shapefile containing the home range of each animal.", "Percentage": "100 minus the proportion of outliers to be excluded from the computation. E.g. Percentage = 95 means that 5% of the outlier locations will be excluded from the calculations.\n\n\n"}
--------------------------------------------------------------------------------
/rscripts/Tobler.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=raster
##x_depart= number 0.1
##y_depart= number 0.1
##x_arrivee= number 0.1
##y_arrivee= number 1
##Output= output vector

# Least-cost walking path between two points over an elevation raster,
# using Tobler's hiking function to turn slope into walking speed
# (conductance) before computing the shortest path.
library(gdistance)
library(rgdal)

dem <- raster(Layer)

# Altitudinal difference between adjacent cells (gdistance transition function).
heightDiff <- function(x) (x[2] - x[1])
hd <- transition(dem, heightDiff, 8)                                   # package gdistance
slope <- geoCorrection(hd, scl = FALSE)                                # package gdistance
adj <- adjacent(x = dem, cells = 1:ncell(dem), pairs = TRUE, directions = 8)  # package raster

# Tobler's hiking function: speed = 6 * exp(-3.5 * |slope + 0.05|).
# Fix: the original placed the 0.05 offset OUTSIDE abs() — the offset
# belongs inside (it models that the fastest walking slope is slightly
# downhill). The constant factor 6 (km/h) is deliberately omitted:
# uniformly scaling conductance does not change the least-cost path.
speed <- slope
speed[adj] <- exp(-3.5 * abs(slope[adj] + 0.05))
speed <- geoCorrection(speed)                                          # package gdistance

# Start and end coordinates from the script parameters.
depart <- c(x_depart, y_depart)
arrivee <- c(x_arrivee, y_arrivee)

# Least-cost path as a SpatialLines object, wrapped with a dummy
# attribute table so it can be written as the vector output.
sPath1 <- shortestPath(speed, depart, arrivee, output = "SpatialLines")
Output = SpatialLinesDataFrame(sPath1, data = as.data.frame(c(1:length(sPath1))), match.ID = F)
--------------------------------------------------------------------------------
/models/model_PCA.model.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This model does a Principal Component Analysis (PCA) with 4 fields and gives: (1) the circle of correlations, (2) the plot of the contribution of each field to an axis, (3) a representation of persons , (4) the representation of variables.", "ALG_CREATOR": "JEANDENANS L.", "VECTORLAYER_LAYER": "Input vector with at least 4 fields.", "NUMBER_INDIVIDU2": "a number of a person who we want to see on the plot", "NUMBER_INDIVIDU1": "a number of a person who we want to see on the plot", "TABLEFIELD_FIELD4": "The fourth field", "TABLEFIELD_FIELD2": "The second field", "TABLEFIELD_FIELD3": "The third field", "TABLEFIELD_FIELD1": "The first field", "RPLOTS_ALG0": "The plot of the circle of correlations", "RPLOTS_ALG1": "The plot of the contribution of each field to an axis.", "RPLOTS_ALG2": "The plot of a representation of persons", "RPLOTS_ALG3": "The plot of a representation of variables.", "ALG_HELP_CREATOR": "JEANDENANS L."}
--------------------------------------------------------------------------------
/rscripts/Variogram Modelling.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_CREATOR": "", "ALG_DESC": "This script allows to try and fit different models of semivariograms. It allows to study a variable before interpolation using Ordinary Kriging.", "R_CONSOLE_OUTPUT": "Fitted model parameters ", "field": "numeric field from layer to interpolate", "ALG_CREATOR": "Guillermo Federico Olmedo\nMarcos Angelini", "nugget": "Iniital value for nugget", "Show_Sum_of_Square_Errors": "Boolean. If checked, it will show the sum of squared errors of the model fitting in R console output.", "layer": "points vector layer", "psill": "Initial value for partial sill", "Estimate_range_and_psill_initial_values_from_sample_variogram": "Boolean. If checked, initial values for nugget, psill and range will be estimated from sample variogram", "range": "Initial value for range", "ALG_VERSION": "0.7", "ALG_HELP_CREATOR": "Guillermo Federico Olmedo","ALG_VERSION": "0.7", "model": "Model to fit", "RPLOTS": "Semivariogram and fitted model"}
2 |
--------------------------------------------------------------------------------
/rscripts/Inverse_Distance_Weigthing_with_method_selection.rsx.help:
--------------------------------------------------------------------------------
1 | {"a": "a number", "Layer2": "Input raster of the area.", "ALG_DESC": "This script computes an Inverse Distance Weigthing (IDW) with differents methods and parameters.It gives a shapefile with coordinates of points and values of the ponderation.\nThe differents methods of ponderation are:\n(0) w(d)=1/(d^p)\n(1) w(d)=1/(1+d^p)\n(2) w(d)=(1-nugget)/(1+(range/d)^2)\n(3) w(d)=(1-nugget)*exp(-range/d)\n(4) w(d)=(1-nugget)*[1-1.5*(range/d)+0.5*(range/d)^3]\n(5) w(d)=1-\u222bp(t)dt, avec p(t) the density of the normal distribution\n(6) w(d)=1/(1+exp(-alpha)), avec alpha=a*d+b (a>0)", "b": "a number", "distance.max": "a number >0", "ALG_CREATOR": "JEANDENANS L.", "Layer1": "Input vector with points", "mu": "a number", "puissance": "a number >0", "nugget": "a number", "range": "a number", "ALG_HELP_CREATOR": "JEANDENANS L.", "ponderation": "a number between 0 and 7 to select the method", "output": "a shapefile with coordinates of points and values of the ponderation", "sigma": "a number", "distance.min": "a number >0"}
--------------------------------------------------------------------------------
/rscripts/Monte-Carlo_spatial_randomness.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This is a Monte Carlo test for the point patterns. It is based on simulations from the null hypothesis. It generates randomly dinstributed points within the study region and uses K-function for each set of generated points and compares them to the K-function for the original set of points. Detailed description can be found in correspondin section of this book: http://www.csiro.au/resources/pf16h.\n\nThis script will provide both graphical ('R plots') and verbose ('R console output') output.\n\nR dependencies: library \"maptools\" and \"spatstat\"", "R_CONSOLE_OUTPUT": "Description of the test results.", "ALG_CREATOR": "Yury Ryabov\nriabovv at gmail dot com\n2013\nGPLv3", "Layer": "Point layer to be tested.", "RPLOTS": "Graph showing test results.", "ALG_HELP_CREATOR": "Yury Ryabov\nriabovv at gmail dot com\n2013\nCC-0", "Simulations": "Number of simulations for random points distributions. Positive integer must be provided here.", "Optional_plot_name": "An Optional name for the graph. It is Ok to leave this field blank."}
--------------------------------------------------------------------------------
/scripts/Define_1_raster_layer_properties.py:
--------------------------------------------------------------------------------
##Raster=group
##Define 1 raster layer properties=name
##Raster_layer=raster
##QML_file=file
##Coordinate_Reference_System=crs None
##Refresh_contrast_enhancement=boolean True
##Save_layer_style_as_default=boolean False

from qgis.core import *
from qgis.utils import iface
import os

# QGIS Processing script: applies a set of properties (QML style, CRS,
# contrast enhancement, default style) to the input raster layer.
# The ##-headers above declare the inputs; the QGIS Processing runtime
# injects the corresponding variables before this script runs.

# rename inputs
crs = Coordinate_Reference_System
qml = QML_file
rce = Refresh_contrast_enhancement
ss = Save_layer_style_as_default

# Get layer object
layer = processing.getObject(Raster_layer)

# Set style from QML
# Silently skipped when the QML path is empty or does not exist.
if os.path.exists(qml):
    layer.loadNamedStyle(qml)
    iface.legendInterface().refreshLayerSymbology(layer)

# Set CRS
# NOTE(review): this only redefines the layer's declared CRS; it does
# not reproject the raster data (same caveat as the vector variant).
if crs:
    qcrs = QgsCoordinateReferenceSystem()
    qcrs.createFromOgcWmsCrs(crs)
    layer.setCrs(qcrs)


# Refresh default contrast enhancement
if rce:
    layer.setDefaultContrastEnhancement()

# Save style as default
# Done last, so the saved default reflects the changes made above.
if ss:
    layer.saveDefaultStyle()
--------------------------------------------------------------------------------
/scripts/FrequencyStats.py.help:
--------------------------------------------------------------------------------
1 | {"raster": "Raster layer", "ALG_DESC": "The Frequency Stats algorithm will determine the most and leaset frequent raster values in each area covered by the polygons in the input layer.\n\nThis is useful when performing an analysis of categorical raster data where the values are not continuous.", "output vector": "New polygon file with additional MAJ (majority), MIN (minority), MAJ_P (majority percentage) and MIN_P (minority percentage) attributes.", "ALG_CREATOR": "Benjamin Trigona-Harany ", "output_table": "Table of each feature, number of raster points covered and frequency of distribution between each raster value.", "id_field": "Field that uniquly identifies each feature in the vector layer", "band": "Raster band", "vector": "Polygon layer", "ALG_VERSION": "0.1", "ALG_HELP_CREATOR": "", "Frequency_analysis_table": "Table that will contain the full breakdown of all raster values across each feature.", "Frequency_analysis_layer": "Copy of input polygon layer with additional columns identifying the non-zeor minority band value (MIN) and the majority band value (MAJ).", "null": ""}
--------------------------------------------------------------------------------
/create_lists.py:
--------------------------------------------------------------------------------
1 |
2 | import glob
3 | import os
4 | import json
5 |
6 | if __name__ == '__main__':
7 | scripts = []
8 | for filename in glob.glob('scripts/*.py'):
9 | print filename
10 | basename = os.path.basename(filename)
11 | name = basename[:-3].replace('_', ' ')
12 | with open(filename) as f:
13 | lines = f.readlines()
14 | for line in lines:
15 | if line.endswith('name'):
16 | name = line.split('=')[0][2:]
17 | helpFile = filename + ".help"
18 | if not os.path.exists(helpFile):
19 | version = 1
20 | else:
21 | with open(helpFile) as f:
22 | try:
23 | helpContent = json.load(f)
24 | version = float(helpContent["ALG_VERSION"])
25 | except:
26 | version = 1
27 | scripts.append('%s,%s,%s' % (basename, version, name))
28 | scripts = sorted( scripts )
29 | with open('scripts/list.txt', 'w') as f:
30 | f.write('\n'.join(scripts))
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/rscripts/F_function.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This R script computes simulation envelopes of the F(r) - empty space function.\n\nThe empty space function (also called the \u201cspherical contact distribution\u201d or the \u201cpoint-to-nearest-event\u201d distribution) of a stationary point process X is the cumulative distribution function F of the distance from a fixed point in space to the nearest point of X. An estimate of F derived from a spatial point pattern dataset can be used in exploratory data analysis and formal inference about the pattern . In exploratory analyses, the estimate of F is a useful statistic summarising the sizes of gaps in the pattern. For inferential purposes, the estimate of F is usually compared to the true value of F for a completely random (Poisson) point process.\n\nR dependencies: library \"maptools\" and \"spatstat\"", "ALG_CREATOR": "Victor Olaya - volaya(at)gmail.com", "Layer": "A vector containg a point pattern.", "Nsim": "Number of simulated point patterns to be generated when computing the envelopes.\n\n", "RPLOTS": "Plot with the simulation envelopes.", "ALG_HELP_CREATOR": "Filipe S. Dias - filipesdias(at)gmail.com"}
--------------------------------------------------------------------------------
/scripts/Summarize.py:
--------------------------------------------------------------------------------
##Table=group
##input=vector
##output=output vector

from PyQt4.QtCore import *
from qgis.core import *

from processing.tools.vector import VectorWriter

# Writes a single point feature located at the mean of all feature
# centroids, carrying the per-field mean of every numeric attribute
# (non-numeric attributes average to 0).

inputLayer = processing.getObject(input)
features = processing.features(inputLayer)
fields = inputLayer.pendingFields().toList()
outputLayer = VectorWriter(output, None, fields, QGis.WKBPoint,
                           inputLayer.crs())
count = 0
mean = [0 for field in fields]
x = 0
y = 0
for ft in features:
    c = ft.geometry().centroid().asPoint()
    x += c.x()
    y += c.y()
    attrs = ft.attributes()
    for f in range(len(fields)):
        try:
            mean[f] += float(attrs[f])
        except (TypeError, ValueError):
            # Non-numeric (or NULL) attribute: its "mean" stays 0.
            pass
    count += 1

# Only emit the summary point when the layer had at least one feature;
# the original wrote a meaningless (0, 0) point for empty layers.
if count != 0:
    mean = [value / count for value in mean]
    x /= count
    y /= count
    outFeat = QgsFeature()
    outFeat.setGeometry(QgsGeometry.fromPoint(QgsPoint(x, y)))
    outFeat.setAttributes(mean)
    outputLayer.addFeature(outFeat)

# Close the writer so the output is flushed to disk (the original never
# released it; the other scripts in this collection all 'del' theirs).
del outputLayer
--------------------------------------------------------------------------------
/scripts/distance_lines_between_points.py:
--------------------------------------------------------------------------------
##Distance lines between points=name
##Vector=group
##pointLayer=vector
##outputLayer=output vector

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter

# Connect every unordered pair of input points with a line feature whose
# 'distance' attribute holds the line length.
srcLayer = processing.getObject(pointLayer)

# Output layer: line geometry plus a single Double 'distance' field.
writer = VectorWriter(outputLayer, None,
                      [QgsField('distance', QVariant.Double)],
                      QGis.WKBLineString, srcLayer.crs())

# Compare each point with every other point; the feature-id ordering
# test keeps exactly one line per pair (no reversed duplicates).
for featA in srcLayer.getFeatures():
    start = featA.geometry().asPoint()
    for featB in srcLayer.getFeatures():
        if featA.id() >= featB.id():
            continue
        end = featB.geometry().asPoint()
        segment = QgsGeometry.fromPolyline([start, end])
        outFeat = QgsFeature()
        outFeat.setGeometry(segment)
        outFeat.setAttributes([segment.length()])
        writer.addFeature(outFeat)

del writer
--------------------------------------------------------------------------------
/rscripts/ACP_individus.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field1=Field Layer
##Field2=Field Layer
##Field3=Field Layer
##Field4=Field Layer
##Individu1= number 10
##Individu2= number 10
##showplots
# Principal component analysis (PCA) of four fields (ade4), highlighting
# two chosen individuals (row indices Individu1/Individu2) on the first
# factorial plane.
library(ade4)
library(rpanel)
library(spatstat)
# Assemble the four fields into a data frame, dropping incomplete rows.
donne<-cbind(Layer[[Field1]], Layer[[Field2]], Layer[[Field3]], Layer[[Field4]])
donne<-na.exclude(donne)
donne<-as.data.frame(donne)
names(donne)<- c(Field1, Field2, Field3, Field4)
# Centred, scaled PCA; scannf = F suppresses the interactive axis prompt.
acp<- dudi.pca(donne, center = T, scale = T, scannf = F)
# Row (individual) and column (variable) coordinates on axes 1 and 2.
cl1<-acp$li[,1]
cc1<-acp$co[,1]
cl2<-acp$li[,2]
cc2<-acp$co[,2]
# Labels come from Field1; Nom_bis (printed at the end) blanks out the
# two highlighted individuals.
# NOTE(review): Nom is built from the full layer while donne has NA rows
# removed — indices may disagree when NAs are present; confirm.
Nom<-as.vector(Layer[[Field1]])
Nom_bis<-as.vector(Layer[[Field1]])
Nom_bis[Individu1]<-""
Nom_bis[Individu2]<-""
x<-dim(donne)[1]
# Refuse indices beyond the number of (complete) rows.
if (Individu1>x | Individu2>x) {
rp.messagebox('error selection unavailable', title = 'oops!')
} else {
plot(cl1,cl2 ,type="n",main="Les individus",xlim=c(-7,7), ylim=c(-2,2))
abline(h=0,v=0)
text(acp$li[Individu1,1],acp$li[Individu1,2],Nom[Individu1],col="red",cex=1.2)
text(acp$li[Individu2,1],acp$li[Individu2,2],Nom[Individu2],col="orange",cex=1.2)
}
# Final expression: auto-printed to the console output by the R provider.
Nom_bis
--------------------------------------------------------------------------------
/rscripts/Kriging.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##showplots
##Layer=vector
##Field=Field Layer
##by=number 0.1
##Output=output raster
# Ordinary kriging of a layer attribute over a regular grid covering the
# layer extent, using automap's automatic variogram fitting.
library(automap)
library(raster)
# Encode the attribute as factor-level codes.
# NOTE(review): if Field is already numeric, this kriges the level code,
# not the original value — confirm this is intended.
Y<-as.factor(Layer[[Field]])
attribut<-as.data.frame(Y)
A<-as.numeric(Y)
# Re-derive the level code of each observation (produces the same values
# that as.numeric(Y) above already gave).
for(j in (1:length(levels(Y))))
for(i in 1:dim(attribut)[1]){
if (attribut[i,1]==levels(Y)[j]){
A[i]=j
}
}
# Build a regular prediction grid spanning the data extent, cell size 'by'.
coords<-coordinates(Layer)
MinX<-min(coords[,1])
MinY<-min(coords[,2])
MaxX<-max(coords[,1])
MaxY<-max(coords[,2])
Seqx<-seq(MinX, MaxX, by=by)
Seqy<-seq(MinY, MaxY, by=by)
MSeqx<-rep(Seqx, length(Seqy))
MSeqy<-rep(Seqy, length(Seqx))
MSeqy <- sort(MSeqy, decreasing=F)
Grille <- data.frame(X=MSeqx, Y=MSeqy)
coordinates(Grille)=c("X","Y")
gridded(Grille)<-TRUE
# Observations as spatial points (LON/LAT taken from the layer coordinates).
Mesure<- data.frame(LON=coords[,1], LAT=coords[,2],A)
coordinates(Mesure)<-c("LON","LAT")
# Fit and show the sample variogram (displayed via ##showplots).
variogram = autofitVariogram(A~1, Mesure)
plot(variogram)
# Krige over the grid; automap selects the best of the listed models.
kriging_result = autoKrige(A~1, Mesure, Grille,model=c("Cir","Lin","Bes","Wav","Hol","Leg","Per","Pen","Mat","Exc","Spl","Ste"))
prediction = raster(kriging_result$krige_output)
Output<-prediction
--------------------------------------------------------------------------------
/rscripts/Variogram Modelling.rsx:
--------------------------------------------------------------------------------
##[R-Geostatistics]=group
##showplots
##layer=vector
##field=field layer
##Estimate_range_and_psill_initial_values_from_sample_variogram=boolean True
##nugget=number 0
##model=selection Exp;Sph;Gau;Mat
##range=number 0
##psill=number 0
##Show_Sum_of_Square_Errors=boolean False

# Fit a semivariogram model (gstat) to a numeric field of a point layer,
# for exploratory study prior to Ordinary Kriging.
library('sp')
library('gstat')
print(model)
# 'model' arrives as the 0-based index of the ##selection header above.
Models<-c("Exp","Sph","Gau","Mat")
model2<-Models[model+1]
# adjust variogram
# Rename the chosen column to 'field', coerce it to numeric, and drop
# duplicate locations and NA values before fitting.
names(layer)[names(layer)==field]="field"
layer$field <- as.numeric(as.character(layer$field))
str(layer)
layer <- remove.duplicates(layer)
layer <- layer[!is.na(layer$field),]

g <- gstat(id = field, formula = field~1, data = layer)
vg <- variogram(g)

# NA initial values make fit.variogram estimate range/psill from the
# sample variogram.
if(Estimate_range_and_psill_initial_values_from_sample_variogram){range=NA}
if(Estimate_range_and_psill_initial_values_from_sample_variogram){psill=NA}

vgm <- vgm(nugget=nugget, range=range, psill=psill, model=model2)
vgm = fit.variogram(vg, vgm)
# Lines starting with '>' are echoed to the R console output
# (R_CONSOLE_OUTPUT) by the QGIS Processing R provider — they are rsx
# syntax, not stray prompt characters.
>vgm
>if(Show_Sum_of_Square_Errors==TRUE){paste("SSE:", attr(vgm, "SSErr"))}
# NOTE(review): 'title' is not defined in this script (in base R it is a
# function), so 'main = title' is suspect — confirm the intended title.
plot(vg, vgm, main = title , plot.numbers = TRUE)
--------------------------------------------------------------------------------
/models/model_IDW.model.help:
--------------------------------------------------------------------------------
1 | {"NUMBER_DISTMIN": "a number >0", "ALG_DESC": "This script computes an Inverse Distance Weigthing (IDW) with differents methods and gives a shapefile with coordinates of points and values of the ponderation.\nThe differents methods of ponderation are:\n(0) w(d)=1/(d^p)\n(1) w(d)=1/(1+d^p)\n(2) w(d)=(1-nugget)/(1+(range/d)^2)\n(3) w(d)=(1-nugget)*exp(-range/d)\n(4) w(d)=(1-nugget)*[1-1.5*(range/d)+0.5*(range/d)^3]\n(5) w(d)=1-\u222bp(t)dt, avec p(t) the density of the normal distribution\n(6) w(d)=1/(1+exp(-alpha)), avec alpha=a*d+b (a>0)\n\n", "TABLEFIELD_ATTRIBUT": "The field from which we want to do the IDW", "ALG_CREATOR": "JEANDENANS L.", "NUMBER_NUGGET": "a number >0", "NUMBER_PONDRATION": "a number between 0 and 7 to select the method", "NUMBER_B": "a number ", "NUMBER_RANGE": "a number >0", "NUMBER_A": "a number >0", "ALG_HELP_CREATOR": "JEANDENANS L.", "NUMBER_SIGMA": "a number >0", "VECTORLAYER_POINTS": "Input vector with points", "NUMBER_PUISSANCE": "a number >0", "NUMBER_MU": "a number >0", "NUMBER_DISTMAX": "a number >0", "VECTORLAYER_PARCELLE": "Input vector of the area.", "output_ALG1": "a shapefile with coordinates of points and values of the ponderation", "OUTPUT_ALG0": "a raster of the area"}
--------------------------------------------------------------------------------
/scripts/spatial_cross_join_attributes.py:
--------------------------------------------------------------------------------
##cover_layer_name=vector
##join_layer_name=vector
##result=output table

from qgis.core import *
import csv

# Spatial cross join: for every pair of intersecting features
# (cover x join), emit one table row holding the attributes of both.

cover_fields = []
join_fields = []
columns = []

cover_layer = processing.getObject(cover_layer_name)
join_layer = processing.getObject(join_layer_name)

# Column headers: all cover_* fields first, then all join_* fields.
for name in cover_layer.dataProvider().fieldNameMap().iterkeys():
    cover_fields.append(name)
    columns.append('cover_' + name)

for name in join_layer.dataProvider().fieldNameMap().iterkeys():
    join_fields.append(name)
    columns.append('join_' + name)

writer = processing.TableWriter(result, None, columns)

for cover_feat in cover_layer.getFeatures():
    # Cheap bounding-box pre-filter, then an exact intersection test.
    request = QgsFeatureRequest()
    request.setFilterRect(cover_feat.geometry().boundingBox())
    provider = join_layer.dataProvider()
    for join_feat in provider.getFeatures(request):
        if not cover_feat.geometry().intersects(join_feat.geometry()):
            continue
        row = [cover_feat[name] for name in cover_fields]
        for name in join_fields:
            row.append(join_feat[name])
        writer.addRecord(row)
--------------------------------------------------------------------------------
/scripts/Batch_replace_in_string_via_regex_dictionary.py:
--------------------------------------------------------------------------------
##Table=group
##Batch string replace via regex dictionary=name
##input=string John has a blue car.
##ignore_case=boolean True
##verbose=boolean False
##replaceDict=string {'John': 'Mary', 'blue': 'red', 'car': 'bike'}
##output=output string

import ast
import re

# Import explicitly: the original raised GeoAlgorithmExecutionException
# without it being in scope, which itself crashed with a NameError.
from processing.core.GeoAlgorithmExecutionException import \
    GeoAlgorithmExecutionException

# Replace every regex key of replaceDict found in the input string by
# its associated value; the result is returned in 'output'.

if not input: input = ''
if not replaceDict: replaceDict = '{}'

if verbose:
    progress.setText('INPUT = \n%s\n' % input)
    progress.setText('REPLACE DICT = \n%s\n' % replaceDict)

reOption = re.MULTILINE
if ignore_case:
    reOption = reOption|re.IGNORECASE

# Set output string
output = input

# Parse the dictionary literal safely (no eval of arbitrary code).
d = ast.literal_eval(replaceDict)

if not isinstance(d, dict):
    raise GeoAlgorithmExecutionException('ERROR - Replace dict does not represent a dictionary. String not changed!' )

# An empty dict is a valid dictionary and simply leaves the input
# unchanged; the original wrongly rejected it ('if d and isinstance...')
# even though '{}' is the documented fallback default above.
for k, v in d.items():
    # Replace search pattern by its value.
    r = re.compile(k, reOption)
    output = r.sub(v, output)

if verbose:
    progress.setInfo('OUTPUT = \n%s\n' % output)
--------------------------------------------------------------------------------
/rscripts/AFC.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field1=Field Layer
##Field2=Field Layer
##Field3=Field Layer
##Field4=Field Layer
##showplots
# Correspondence analysis (CA) of four fields of the layer (ade4),
# showing the scree plot, the first factorial plane and the correlation
# circle side by side.
library(ade4)
library(rpanel)
library(spatstat)
# Assemble the four fields into a data frame, dropping incomplete rows.
donne<-cbind(Layer[[Field1]], Layer[[Field2]], Layer[[Field3]], Layer[[Field4]])
donne<-na.exclude(donne)
donne<-as.data.frame(donne)
names(donne)<- c(Field1, Field2, Field3, Field4)
# Correspondence analysis keeping the first two axes (nf = 2), without
# the interactive axis-selection prompt (scannf = FALSE).
afc <- dudi.coa(donne, scannf = FALSE, nf = 2)
# Eigenvalues with percentage and cumulative percentage of inertia.
summary_afc <- data.frame(
EIG = afc$eig,
PCTVAR = 100 * afc$eig / sum(afc$eig),
CUMPCTVAR = cumsum(100 * afc$eig / sum(afc$eig))
)
# Three panels: scree plot, factorial plane, correlation circle.
par(mfrow=c(1,3))
barplot(summary_afc$PCTVAR,
xlab = "Composantes",
ylab = "Pourcentage de la variance (inertie)",
names = paste("C", seq(1, nrow(summary_afc), 1)),
col = "black",
border = "white")

# Rows as grey dots; columns as a labelled black path on axes 1-2.
plot(afc$li, pch = 20, col = "grey40")
abline(h=0, v=0)
points(afc$co, type = "o", pch = 18, col = "black")
text(afc$co,
labels = row.names(afc$co),
cex = 0.8,
pos = c(rep(4, times = 3), 1, rep(4, times = 4), 3))
# Row/column inertia decomposition; col.abs (absolute contributions of
# the columns) is evaluated below and printed by the R provider.
contrib_afc<- inertia.dudi(afc,
row.inertia = TRUE,
col.inertia = TRUE)
tab<-cbind(afc$li[,1],afc$li[,2])
contrib_afc$col.abs
s.corcircle(afc$co)
--------------------------------------------------------------------------------
/scripts/Save_features_filtered_by_expression.py:
--------------------------------------------------------------------------------
1 | ##Vector_layer=group
2 | ##Save features filtered by expression=name
3 | ##Vector_layer=vector
4 | ##Expression=longstring
5 | ##output=output vector
6 |
7 | from qgis.core import *
8 | from processing.tools.vector import VectorWriter
9 |
10 | # Get vector layer object
11 | layer = processing.getObject(Vector_layer)
12 | provider = layer.dataProvider()
13 |
14 | # Filter features
15 | # Build QGIS request with expression
16 | qExp = QgsExpression(Expression)
17 | if not qExp.hasParserError():
18 | qReq = QgsFeatureRequest(qExp)
19 | ok = True
20 | else:
21 | progress.setText('An error occured while parsing the given expression: %s' % qExp.parserErrorString() )
22 | raise Expection(exp.parserErrorString())
23 | ok = False
24 |
25 | # Get features
26 | if ok:
27 | # Get features corresponding to the expression
28 | features = layer.getFeatures( qReq )
29 | else:
30 | # Get all features
31 | features = layer.getFeatures()
32 |
33 | # Create writer
34 | writer = VectorWriter(output, None, provider.fields(),
35 | provider.geometryType(), layer.crs())
36 |
37 | # Export features
38 | for feat in features:
39 | writer.addFeature(feat)
40 |
41 | del writer
42 |
--------------------------------------------------------------------------------
/scripts/Split_vector_layer_by_attribute.py:
--------------------------------------------------------------------------------
##Vector=group
##input=vector
##class_field=field input
##output=output file
##nomodeler

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter

# Split a vector layer into one shapefile per distinct value of
# 'class_field'; output files are named <output>_<n>.shp in first-seen
# order of the class values.

layer = processing.getObject(input)
provider = layer.dataProvider()
fields = provider.fields()

class_field_index = layer.fieldNameIndex(class_field)

outFeat = QgsFeature()  # reused for every output feature
writers = {}  # class value -> VectorWriter (the original initialised this twice)

feats = processing.features(layer)
nFeat = len(feats)
nElement = 0
for inFeat in feats:
    progress.setPercentage(int(100 * nElement / nFeat))
    nElement += 1
    atMap = inFeat.attributes()
    clazz = atMap[class_field_index]
    if clazz not in writers:
        outputFile = output + '_' + str(len(writers)) + '.shp'
        writers[clazz] = VectorWriter(outputFile, None, fields,
                                      provider.geometryType(), layer.crs())
    outFeat.setGeometry(inFeat.geometry())
    outFeat.setAttributes(atMap)
    writers[clazz].addFeature(outFeat)

# Release every writer so the shapefiles are flushed. The original's
# 'for writer in writers.values(): del writer' only unbound the loop
# name — the dict still referenced the writers, so they were never
# destroyed here.
writers.clear()
--------------------------------------------------------------------------------
/scripts/Fill_holes.py:
--------------------------------------------------------------------------------
##Polygons=vector
##Max_area=number 100000
##Results=output vector

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
from shapely.geometry import Polygon, MultiPolygon
from shapely.wkb import loads
from shapely.wkt import dumps

# Remove from each polygon every interior ring (hole) whose area is at
# most Max_area; larger holes are kept.

polyLayer = processing.getObject(Polygons)
polyPrder = polyLayer.dataProvider()
n = polyLayer.featureCount()
l = 0  # features processed so far, for progress reporting


writer = VectorWriter(Results, None, polyPrder.fields(),
QGis.WKBMultiPolygon, polyPrder.crs())


resgeom = QgsGeometry()
resfeat = QgsFeature()  # reused for every output feature

for feat in processing.features(polyLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    # Round-trip through shapely (via WKB) to do the ring filtering.
    g = loads(feat.geometry().asWkb())

    if g.geom_type == 'MultiPolygon':
        # Rebuild each part keeping only the holes larger than Max_area.
        # NOTE(review): iterating a MultiPolygon directly ('for p in g')
        # works in shapely 1.x; shapely 2.x requires g.geoms — confirm
        # the target shapely version.
        resg = [Polygon(p.exterior,
        [r for r in p.interiors if Polygon(r).area > Max_area]) for p in g]

    else:
        resg = [Polygon(g.exterior,
        [r for r in g.interiors if Polygon(r).area > Max_area])]

    # Back to a QGIS geometry (always as a MultiPolygon) via WKT.
    resgeom = QgsGeometry().fromWkt(dumps(MultiPolygon(resg)))

    resfeat.setAttributes(feat.attributes())
    resfeat.setGeometry(resgeom)
    writer.addFeature(resfeat)

del writer
--------------------------------------------------------------------------------
/scripts/Unique_values_count.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This script will count unique values in a raster. Multiband rasters are accepted.", "ALG_CREATOR": "Yury Ryabov\nriabovvv at gmail dot com\n2013\nGPLv3", "output_file": "Provide path to html file that will be created to represent calculation results.", "round_to_ndigits": "If you want values in raster to be rounded and 'round floats' parameter was modified, raster values will be rounded to ndigits after decimal point. Here you enter that ndigits value. Negative values in this dield are accepted and cell values will be rounded to ndigits before decimal point. NOTE that value in this field won't affect calculations if 'round floats' option is set to 'no'. ", "unique_cells_count": "", "round_values_to_ndigits": "Optional. If you want values in raster to be rounded before counting, enter ndigits value here. Negative values in this field are accepted and cell values will be rounded to ndigits before decimal point.", "result": "Html file that will have the table with unique cell values and their counts.", "round_values": "If you want to count values rounded to the certain digit - just modify this string (to 'yes' or whatever you like).", "round_to_digit": "", "input": "Single- or multiband GDAL-supported raster. ", "round_floats": " ", "ALG_HELP_CREATOR": "Yury Ryabov\nriabovvv at gmail dot com\n2013\n"}
--------------------------------------------------------------------------------
/rscripts/G_function.rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This R script computes simulation envelopes of the G(r) - nearest neighbour distance distribution function.\n\nThe nearest neighbour distance distribution function (also called the \u201cevent-to-event\u201d or \u201cinter-event\u201d distribution) of a point process X is the cumulative distribution function G of the distance from a typical random point of X to the nearest other point of X. An estimate of G derived from a spatial point pattern dataset can be used in exploratory data analysis and formal inference about the pattern. In exploratory analyses, the estimate of G is a useful statistic summarising one aspect of the \u201cclustering\u201d of points. For inferential purposes, the estimate of G is usually compared to the true value of G for a completely random (Poisson) point process, which is where lambda is the intensity (expected number of points per unit area). Deviations between the empirical and theoretical G curves may suggest spatial clustering or spatial regularity.\n\nR dependencies: library \"maptools\" and \"spatstat\"", "ALG_CREATOR": "Victor Olaya, volayaf(at)gmail.com", "Layer": "A point pattern process.", "Nsim": "Number of simulated point patterns to be generated when computing the envelopes.", "RPLOTS": "Plot with the simulation envelopes.", "ALG_HELP_CREATOR": "Filipe S. Dias, filipesdias(at)gmail.com"}
--------------------------------------------------------------------------------
/scripts/Keep_n_biggest_parts.py:
--------------------------------------------------------------------------------
##Polygons=vector
##To_keep=number 1
##Results=output vector

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
from operator import itemgetter

# For each multipart polygon keep only its To_keep largest parts (by
# area); single-part features are copied unchanged.


def _largest_part_indices(sized_parts, n_keep):
    """Return the indices of the n_keep largest parts.

    sized_parts is a list of (index, area) pairs; the kept indices are
    returned in ascending-area order, largest last.
    """
    ranked = sorted(sized_parts, key=itemgetter(1))
    return [i for i, _ in ranked[-n_keep:]]


To_keep = int(To_keep)
if To_keep < 1:
    progress.setText("At least 1 part to keep")
    To_keep = 1


polyLayer = processing.getObject(Polygons)
polyPrder = polyLayer.dataProvider()
n = polyLayer.featureCount()
l = 0

writer = VectorWriter(Results, None, polyPrder.fields(),
                      QGis.WKBMultiPolygon, polyPrder.crs())


for feat in processing.features(polyLayer):
    progress.setPercentage(int(100*l/n))
    l += 1

    geom = feat.geometry()

    if geom.isMultipart():
        featres = feat
        geoms = geom.asGeometryCollection()
        sized = [(i, geoms[i].area()) for i in range(len(geoms))]

        # Fix: the original used geomlength[-1 * To_keep] (a single
        # (index, area) tuple) where a slice [-To_keep:] was intended,
        # raising a TypeError whenever To_keep > 1.
        keep = _largest_part_indices(sized, To_keep)

        if To_keep == 1:
            featres.setGeometry(geoms[keep[-1]])
        else:
            geomres = [geoms[i].asPolygon() for i in keep]
            featres.setGeometry(QgsGeometry.fromMultiPolygon(geomres))

        writer.addFeature(featres)

    else:
        writer.addFeature(feat)

del writer
--------------------------------------------------------------------------------
/rscripts/Relative_distribution_(distance_covariate).rsx.help:
--------------------------------------------------------------------------------
1 | {"Plot_name": "Optional plot name.", "ALG_DESC": "This algorithm creates a graph that demonstrates the dependency of the intensity of the point process on the value of covariate. In this algorithm the covariate is the distance to the certain objects. The functionality is based on 'rhohat' function of the 'spatstat' package. \n\nR dependencies: library \"maptools\", \"spatstat\" and \"rpanel\".", "ALG_CREATOR": "Yury Ryabov\n2013\nriabovvv@gmail.com", "Layer": "The point process which distribution will be investigated.", "Covariate_name": "This field is mandatory. Enter the name of the covariate. It will appear at the graph.", "x_label": "Optional label for the X axis. Note that units at the X axis will be the same as in the input layers.", "RPLOTS": "The empirical graph of the dependency of the intensity of the point process on the distance to the given objects.", "ALG_HELP_CREATOR": "Yury Ryabov\n2013\nriabovvv@gmail.com", "Legend_position": "This field defines the position of the legend at the graph. 'float' means that the legend will be placed at the position that would not overlap the graph itself (or will try at least). Other options are: 'topleft', 'topright', 'bottomleft', 'bottomright'. ", "Covariate": "The set of objects the distance from which will be calculated and used as a spatial covariate to the point process."}
--------------------------------------------------------------------------------
/scripts/Create_vector_layer_from_Postgis_table.py:
--------------------------------------------------------------------------------
##Database=group
##Create vector layer from postgis table=name
##Host=string localhost
##Port=number 5432
##Database=string
##User=string
##Password=string
##Schema=string public
##Table=string
##Geometry_column=string geom
##Where_clause=string
##Unique_id_field_name=string id
##output=output vector

from qgis.core import *
from processing.tools.vector import VectorWriter

# Assemble the PostGIS data-source URI from the dialog parameters
uri = QgsDataSourceURI()
uri.setConnection(Host, str(Port), Database, User, Password)
uri.setDataSource(Schema, Table, Geometry_column, Where_clause, Unique_id_field_name)

# Load the table as a PostGIS vector layer
layer = QgsVectorLayer(uri.uri(), 'vlayer', 'postgres')

if not layer.isValid():
    # Connection or query failed: report and produce no output
    progress.setText('## The layer is invalid - Please check the connection parameters.')
else:
    provider = layer.dataProvider()

    # Writer matching the source layer's schema, geometry type and CRS
    writer = VectorWriter(
        output,
        None,
        provider.fields(),
        provider.geometryType(),
        layer.crs()
    )

    # Copy every feature into the output
    for feature in layer.getFeatures():
        writer.addFeature(feature)

    del writer
--------------------------------------------------------------------------------
/scripts/Set_multiple_raster_layers_properties.py:
--------------------------------------------------------------------------------
##Raster=group
##Set multiple raster layers properties=name
##Raster_layers=multiple raster
##QML_file=file
##Coordinate_Reference_System=crs None
##Refresh_contrast_enhancement=boolean True
##Save_layer_style_as_default=boolean False

from qgis.core import *
from qgis.utils import iface
import os

# Short aliases for the dialog inputs
crs = Coordinate_Reference_System
qml = QML_file
rce = Refresh_contrast_enhancement
ss = Save_layer_style_as_default

# Apply the requested properties to every selected raster layer
uris = Raster_layers.split(';')
total = len(uris)
for idx, layerUri in enumerate(uris):
    progress.setPercentage(int(100 * idx / total))

    # Resolve the layer object behind this uri
    rasterLayer = processing.getObjectFromUri(layerUri)

    # Apply the QML style when the file exists
    if os.path.exists(qml):
        rasterLayer.loadNamedStyle(qml)
        iface.legendInterface().refreshLayerSymbology(rasterLayer)

    # Assign the chosen CRS
    if crs:
        qcrs = QgsCoordinateReferenceSystem()
        qcrs.createFromOgcWmsCrs(crs)
        rasterLayer.setCrs(qcrs)

    # Recompute the default contrast enhancement
    if rce:
        rasterLayer.setDefaultContrastEnhancement()

    # Persist the current style as the layer's default
    if ss:
        rasterLayer.saveDefaultStyle()
--------------------------------------------------------------------------------
/scripts/DissolveWithStats.py.help:
--------------------------------------------------------------------------------
{"ALG_DESC": "This plugin is based upon the QGIS \"dissolve\" tool but adds the possibility to keep or not the other fields, and to calculate statistics on these other fields such as count, first, last, concatenation and uniquification for string fields, count, first, last, min, max, mean, median, standard deviation, and sum for numeric fields.", "Statistics": "Enter a statistic for each field of your input layer. Possible values for numeric fields are : count, first, last, max, mean, median, min, sd and sum. Possible values for text fields are count, concat, first, last and unique.\nsd stands for standard deviation, concat for concatenation and unique for uniquification.\nIf you don't wish to keep a field, use the 'no' value for this field.\nValues must be entered in the same order as the order of the fields in your input layer, as you can see them in the attribute table. Values must be separated with semicolon (;).\nFor example, if your input layer has 3 fields and you want to calculate the sum for the first one, the count for the second one and not keep the third one, type : sum;count;no", "Output_Layer": "Output layer to be created", "ALG_CREATOR": "Julie Pierson, UMR 5319 Passages, CNRS", "ALG_VERSION": "0.4", "Input_layer": "Choose a layer to dissolve", "Dissolve_field": "All the geometries with the same value for this field will be merged together", "ALG_HELP_CREATOR": "Julie Pierson, UMR 5319 Passages, CNRS"}
2 |
--------------------------------------------------------------------------------
/scripts/Remove_parts.py:
--------------------------------------------------------------------------------
##Polygons=vector
##Max_Area=number 100000
##Delete_holes=boolean True
##Results=output vector

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
from shapely.geometry import Polygon, MultiPolygon
from shapely.wkb import loads
from shapely.wkt import dumps

# Remove polygon parts whose outer ring is smaller than Max_Area and,
# when Delete_holes is set, holes smaller than Max_Area.

srcLayer = processing.getObject(Polygons)
srcProvider = srcLayer.dataProvider()
total = srcLayer.featureCount()
done = 0

writer = VectorWriter(Results, None, srcProvider.fields(),
        QGis.WKBMultiPolygon, srcProvider.crs())


for feat in processing.features(srcLayer):
    progress.setPercentage(int(100*done/total))
    done += 1

    shape = loads(feat.geometry().asWkb())

    # Normalise to a list of simple polygon parts
    parts = [shape] if shape.geom_type == 'Polygon' else shape

    kept = []
    for part in parts:
        # Size test on the outer ring only (hole area is ignored here)
        if Polygon(part.exterior).area > Max_Area:
            if not Delete_holes or len(part.interiors) == 0:
                # Part kept verbatim
                kept.append(part)
            else:
                # Rebuild the part keeping only its large-enough holes
                rings = [r for r in part.interiors if Polygon(r).area > Max_Area]
                kept.append(Polygon(part.exterior, rings))

    # Features whose every part was removed are dropped entirely
    if len(kept) > 0:
        feat.setGeometry(QgsGeometry.fromWkt(dumps(MultiPolygon(kept))))
        writer.addFeature(feat)

del writer
--------------------------------------------------------------------------------
/rscripts/Kriging_with_model_selection.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Field=Field Layer
##Selection=Selection Exp;Log;Sph;Gau;Exc;Mat;Cir;Lin;Bes;Pen;Per;Wav;Hol;Leg;Ste
## by= number 0.1
##Output=output raster
# Ordinary kriging of a factor-coded attribute over a regular grid, with
# automatic variogram fitting (automap::autofitVariogram) for the model
# picked in 'Selection'. Grid cell size is 'by', extent is the layer extent.
library(gstat)
library(rgl)
library("spatstat")
library("maptools")
# BUGFIX: the original called install.packages("pls") unconditionally on
# every run (needs network + a configured CRAN mirror); install only when
# the package is actually missing.
if (!requireNamespace("pls", quietly = TRUE)) install.packages("pls")
library (pls)
library(automap)
library(raster)
# Recode the attribute's factor levels as consecutive integers 1..k in A
Y<-as.factor(Layer[[Field]])
attribut<-as.data.frame(Y)
A<-as.numeric(Y)
for(j in (1:length(levels(Y))))
for(i in 1:dim(attribut)[1]){
if (attribut[i,1]==levels(Y)[j]){
A[i]=j
}
}
# Observations as a SpatialPointsDataFrame
coords<-coordinates(Layer)
Mesure<- data.frame(LON=coords[,1], LAT=coords[,2], A)
coordinates(Mesure)<-c("LON","LAT")
# Map the 0-based 'Selection' index onto the gstat model code
Models<-c("Exp","Log","Sph","Gau","Exc","Mat","Cir","Lin","Bes","Pen","Per","Wav","Hol","Leg","Ste")
Selection<-Selection+1
select_model<-Models[Selection]
# Regular prediction grid covering the layer's bounding box
MinX<-min(coords[,1])
MinY<-min(coords[,2])
MaxX<-max(coords[,1])
MaxY<-max(coords[,2])
Seqx<-seq(MinX, MaxX, by=by)
Seqy<-seq(MinY, MaxY, by=by)
MSeqx<-rep(Seqx, length(Seqy))
MSeqy<-rep(Seqy, length(Seqx))
MSeqy <- sort(MSeqy, decreasing=F)
Grille <- data.frame(X=MSeqx, Y=MSeqy)
coordinates(Grille)=c("X","Y")
gridded(Grille)<-TRUE
# Fit the variogram automatically, then krige onto the grid
v<-autofitVariogram(A~1,Mesure,model = select_model)
prediction <-krige(formula=A~1, Mesure, Grille, model=v$var_model)
# Return the prediction as a raster in the input layer's CRS
result<-raster(prediction)
proj4string(Layer)->crs
proj4string(result)<-crs
Output<-result
--------------------------------------------------------------------------------
/rscripts/Relative_distribution_(raster_covariate).rsx.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "This algorithm creates a graph that demonstrates the dependency of the intensity of the point process on the value of the given covariate. In this algorithm the covariate must be represented as a raster. The functionality is based on 'rhohat' function of the 'spatstat' package. \n\nR dependencies: library \"geostatsp\", \"maptools\", \"rpanel\", \"spatstat\"", "plot_name": "Optional plot name.", "ALG_CREATOR": "Yury Ryabov\n2013\nriabovvv@gmail.com", "covariate_name": "This field is mandatory. Enter the name of the covariate. It will appear at the graph.", "x_label": "Optional label for the X axis. Note that units at the X axis will be the same as in the input layers.", "points": "The point process which distribution will be investigated.", "RPLOTS": "The empirical graph of the dependency of the intensity of the point process on the distance to the given objects.", "ALG_HELP_CREATOR": "Yury Ryabov\n2013\nriabovvv@gmail.com", "legend_position": "This field defines the position of the legend at the graph. 'float' means that the legend will be placed at the position that would not overlap the graph itself (or will try at least). Other options are: 'topleft', 'topright', 'bottomleft', 'bottomright'. ", "covariate": "The spatial covariate to the point process. The raster must not have discrete values, i.e. it may not be classified. Only rasters that represent continious phenomena (e.g. DEM, distance maps, etc.) are allowed. Though classified rasters will be processed if supplied, but the results will be meaningless."}
--------------------------------------------------------------------------------
/rscripts/Ordinary Kriging.rsx.help:
--------------------------------------------------------------------------------
{"Local_kriging": "Boolean. If checked, points to interpolate will be limited to a number of nearest observations", "kriging_prediction": "Kriging predicted value", "ALG_DESC": "This script does Ordinary Kriging interpolation from a numeric field of a points vector layer. It allows to auto select the initial values for nugget, psill and range; or it can fit a model from initial values provided. Besides, you can limit the number of points used to predict.", "R_CONSOLE_OUTPUT": "Fitted model parameters ", "Number_of_nearest_observations": "Maximum number of observations used in local kriging", "Estimate_range_and_psill_initial_values_from_sample_variogram": "Boolean. If checked, initial values for nugget, psill and range will be estimated from sample variogram", "ALG_CREATOR": "Guillermo Federico Olmedo\nMarcos Angelini", "nugget": "Initial value for nugget", "layer": "points vector layer", "psill": "Initial value for partial sill", "field": "numeric field from layer to interpolate", "range": "Initial value for range", "RPLOTS": "Semivariogram and fitted model", "Show_Sum_of_Square_Errors": "Boolean. If checked, it will show the sum of squared errors of the model fitting in R console output.", "ALG_HELP_CREATOR": "Guillermo Federico Olmedo", "Extent": "Method to calculate the extent of interpolation", "ALG_VERSION": "1.0", "model": "Model to fit", "Resolution": "Cellsize of the interpolation raster, in layer units. Only for projected layers. Layers in lat-long will be interpolated over 5000 cells.", "kriging_variance": "Kriging variance of prediction"}
2 |
--------------------------------------------------------------------------------
/scripts/Define_1_vector_layer_properties.py:
--------------------------------------------------------------------------------
##Vector=group
##Define 1 vector layer properties=name
##Vector_layer=vector
##QML_file=file
##Coordinate_Reference_System=crs None
##Create_spatial_index=boolean False
##Calculate_extent=boolean False
##Layer_title=string
##Layer_abstract=longstring
##Save_layer_style_as_default=boolean False

# Apply style, CRS, spatial index, extent and metadata settings
# to a single vector layer.

from qgis.core import *
from qgis.utils import iface
import os

# rename inputs
qml = QML_file
crs = Coordinate_Reference_System
csi = Create_spatial_index
ce = Calculate_extent
lt = Layer_title
la = Layer_abstract
ss = Save_layer_style_as_default

# Get layer object
layer = processing.getObject(Vector_layer)
provider = layer.dataProvider()

# Set style from QML
if os.path.exists(qml):
    layer.loadNamedStyle(qml)
    iface.legendInterface().refreshLayerSymbology(layer)

# Set CRS (consistency: use the 'crs' alias like the other inputs)
if crs:
    qcrs = QgsCoordinateReferenceSystem()
    qcrs.createFromOgcWmsCrs(crs)
    layer.setCrs(qcrs)

# Create spatial index
# BUGFIX: capabilities() is a bitmask, so the flag must be tested with
# bitwise '&'. The original used boolean 'and', which is true whenever ANY
# capability is set, even if CreateSpatialIndex is unsupported.
if csi and provider.capabilities() & QgsVectorDataProvider.CreateSpatialIndex:
    if not provider.createSpatialIndex():
        progress.setText(u'Cannot create spatial index for layer : %s' % layer.name())

# Calculate extent
if ce:
    layer.updateExtents()

# Set layer metadata
if lt:
    layer.setTitle(lt)
if la:
    layer.setAbstract(la)

# Save style as default
if ss:
    layer.saveDefaultStyle()
--------------------------------------------------------------------------------
/scripts/pygraticule.py.help:
--------------------------------------------------------------------------------
1 | {"ALG_DESC": "Creates a Graticule (Vector Grid) of line objects in GeoJson format. Specifically this method creates lines with dense points so that they reproject as smooth curves.\n\nUsually you create it using the Coordinate System you want to overlay (e.g. Lat/Lon) and then reproject it afterwards on top of your map in a different CRS.\n\n More Details: https://github.com/wildintellect/pyGraticule ", "ymax": "maximum Y (e.g. Latitude or Northing) value (in coordinate system units) of the bounding box for the region you want to cover.", "graticule": "(Optional) Shapefile for auto loading at the end of creation. Directly converted from GeoJson. Specificy or Save As aftewards if you want something besides geojson.", "density": "How often to put a point node in the line. The smaller the number the more points and the smoother the reprojected line. The trade off is in file size.", "ALG_CREATOR": "Alex Mandel", "outfile": "GeoJson file to save result. Make sure to specifiy .geojson or .json in the file format selection. Leave as default temp file if you don't need the geojson.", "xmax": "maximum X (e.g. Longitude or Easting) value (in coordinate system units) of the bounding box for the region you want to cover.", "xmin": "minimum X (e.g. Longitude or Easting) value (in coordinate system units) of the bounding box for the region you want to cover.", "ymin": "minimum Y (e.g. Latitude or Northing) value (in coordinate system units) of the bounding box for the region you want to cover.", "spacing": "The spacing between lines you want in map units (e.g 10 for a line every 10 degrees).", "ALG_VERSION": "1.1", "ALG_HELP_CREATOR": "Alex Mandel"}
2 |
--------------------------------------------------------------------------------
/scripts/Define_vector_layer_properties.py:
--------------------------------------------------------------------------------
##Vector=group
##Define multiple vector layers properties=name
##Vector_layers=multiple vector
##QML_file=file
##Coordinate_Reference_System=crs None
##Create_spatial_index=boolean False
##Calculate_extent=boolean False
##Save_layer_style_as_default=boolean False

# Apply style, CRS, spatial index and extent settings to every
# selected vector layer.

from qgis.core import *
from qgis.utils import iface
import os

# rename inputs
qml = QML_file
crs = Coordinate_Reference_System
csi = Create_spatial_index
ce = Calculate_extent
ss = Save_layer_style_as_default

# Iterate over the chosen layers
layersUri = Vector_layers.split(';')
for i, uri in enumerate(layersUri):
    progress.setPercentage(int(100 * i / len(layersUri)))

    # Get layer from passed uri
    layer = processing.getObjectFromUri(uri)
    provider = layer.dataProvider()

    # Set style from QML
    if os.path.exists(qml):
        layer.loadNamedStyle(qml)
        iface.legendInterface().refreshLayerSymbology(layer)

    # Set CRS (consistency: use the 'crs' alias like the other inputs)
    if crs:
        qcrs = QgsCoordinateReferenceSystem()
        qcrs.createFromOgcWmsCrs(crs)
        layer.setCrs(qcrs)

    # Create spatial index
    # BUGFIX: capabilities() is a bitmask, so the flag must be tested with
    # bitwise '&'. The original used boolean 'and', which is true whenever
    # ANY capability is set, even if CreateSpatialIndex is unsupported.
    if csi and provider.capabilities() & QgsVectorDataProvider.CreateSpatialIndex:
        if not provider.createSpatialIndex():
            progress.setText(u'Cannot create spatial index for layer : %s' % layer.name())

    # Calculate extent
    if ce:
        layer.updateExtents()

    # Save style as default
    if ss:
        layer.saveDefaultStyle()
--------------------------------------------------------------------------------
/models/model_PCA.model:
--------------------------------------------------------------------------------
1 | NAME:ACP
2 | GROUP:Basic Statistics
3 | PARAMETER:ParameterVector|VECTORLAYER_LAYER|Layer|-1|False
4 | 120.0,60.0
5 | PARAMETER:ParameterTableField|TABLEFIELD_FIELD1|Field1|VECTORLAYER_LAYER|-1|False
6 | 340.0,60.0
7 | PARAMETER:ParameterTableField|TABLEFIELD_FIELD2|Field2|VECTORLAYER_LAYER|-1|False
8 | 560.0,60.0
9 | PARAMETER:ParameterTableField|TABLEFIELD_FIELD3|Field3|VECTORLAYER_LAYER|-1|False
10 | 780.0,60.0
11 | PARAMETER:ParameterTableField|TABLEFIELD_FIELD4|Field4|VECTORLAYER_LAYER|-1|False
12 | 1000.0,60.0
13 | PARAMETER:ParameterNumber|NUMBER_INDIVIDU1|individu 1|None|None|0.0
14 | 1220.0,60.0
15 | PARAMETER:ParameterNumber|NUMBER_INDIVIDU2|individu 2|None|None|0.0
16 | 1440.0,60.0
17 | VALUE:HARDCODEDPARAMVALUE_Individu1_2===10.0
18 | VALUE:HARDCODEDPARAMVALUE_Individu2_2===10.0
19 | ALGORITHM:r:acpcercle
20 | 120.0,160.0
21 | None
22 | -1|VECTORLAYER_LAYER
23 | -1|TABLEFIELD_FIELD1
24 | -1|TABLEFIELD_FIELD2
25 | -1|TABLEFIELD_FIELD3
26 | -1|TABLEFIELD_FIELD4
27 | C:\Users\Jeandenans Laura\Documents\R\cercle|320.0,160.0
28 | ALGORITHM:r:acpcontribution
29 | 340.0,260.0
30 | None
31 | -1|VECTORLAYER_LAYER
32 | -1|TABLEFIELD_FIELD1
33 | -1|TABLEFIELD_FIELD2
34 | -1|TABLEFIELD_FIELD3
35 | -1|TABLEFIELD_FIELD4
36 | C:\Users\Jeandenans Laura\Documents\R\contribution|540.0,260.0
37 | ALGORITHM:r:acpindividus
38 | 560.0,360.0
39 | None
40 | -1|VECTORLAYER_LAYER
41 | -1|TABLEFIELD_FIELD1
42 | -1|TABLEFIELD_FIELD2
43 | -1|TABLEFIELD_FIELD3
44 | -1|TABLEFIELD_FIELD4
45 | -1|NUMBER_INDIVIDU1
46 | -1|NUMBER_INDIVIDU2
47 | C:\Users\Jeandenans Laura\Documents\R\individus|760.0,360.0
48 | ALGORITHM:r:acpvar
49 | 780.0,460.0
50 | None
51 | -1|VECTORLAYER_LAYER
52 | -1|TABLEFIELD_FIELD1
53 | -1|TABLEFIELD_FIELD2
54 | -1|TABLEFIELD_FIELD3
55 | -1|TABLEFIELD_FIELD4
56 | C:\Users\Jeandenans Laura\Documents\R\var|980.0,460.0
57 |
--------------------------------------------------------------------------------
/scripts/Cut_by_field.py:
--------------------------------------------------------------------------------
##Polygons=vector
##Key_Field=field Polygons
##Cutting_polygons=vector
##Cut_Key_Field=field Cutting_polygons
##Results=output vector

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
from shapely.geometry import Polygon, MultiPolygon
from shapely.wkt import loads, dumps

# Clip each input polygon by the union of all cutting polygons that
# share the same key value.

cutLayer = processing.getObject(Cutting_polygons)
cutPrder = cutLayer.dataProvider()
total = cutLayer.featureCount()
count = 0

# key value -> list of cutter geometries (WKT strings)
cutters = {}

for cutFeat in processing.features(cutLayer):
    progress.setPercentage(int(100*count/total))
    count += 1

    key = cutFeat[Cut_Key_Field]
    cutters.setdefault(key, []).append(cutFeat.geometry().exportToWkt())


# Collapse each key's geometry list into one MultiPolygon WKT

for key in cutters.keys():

    parts = []

    for shape in [loads(w) for w in cutters[key]]:
        # Flatten: polygons go in directly, multipolygons part by part
        if shape.geom_type == 'Polygon':
            parts.append(shape)
        else:
            parts.extend(shape)

    cutters[key] = dumps(MultiPolygon(parts))


polyLayer = processing.getObject(Polygons)
polyPrder = polyLayer.dataProvider()
total = polyLayer.featureCount()
count = 0

writer = VectorWriter(Results, None, polyPrder.fields(),
        QGis.WKBMultiPolygon, polyPrder.crs())


for feat in processing.features(polyLayer):
    progress.setPercentage(int(100*count/total))
    count += 1

    key = feat[Key_Field]

    if key not in cutters:
        # No cutter for this key: feature passes through unclipped
        progress.setText("No corresponding key: {0}".format(key))
    else:
        cutgeom = QgsGeometry.fromWkt(cutters[key])
        feat.setGeometry(feat.geometry().intersection(cutgeom))

    writer.addFeature(feat)

del writer
--------------------------------------------------------------------------------
/models/model_IDW.model:
--------------------------------------------------------------------------------
1 | NAME:Moyenne_ponderee
2 | GROUP:Basic Statistics
3 | PARAMETER:ParameterVector|VECTORLAYER_POINTS|Point(s)|-1|False
4 | 120.0,60.0
5 | PARAMETER:ParameterVector|VECTORLAYER_PARCELLE|Parcelle|-1|False
6 | 340.0,60.0
7 | PARAMETER:ParameterTableField|TABLEFIELD_ATTRIBUT|Attribut|VECTORLAYER_PARCELLE|-1|False
8 | 560.0,60.0
9 | PARAMETER:ParameterNumber|NUMBER_DISTMIN|Dist_min|None|None|0.0
10 | 783.0,158.0
11 | PARAMETER:ParameterNumber|NUMBER_DISTMAX|Dist_max|None|None|0.0
12 | 782.0,108.0
13 | PARAMETER:ParameterNumber|NUMBER_PUISSANCE|Puissance|None|None|0.0
14 | 782.0,208.0
15 | PARAMETER:ParameterNumber|NUMBER_PONDRATION|Pondération|None|None|0.0
16 | 781.0,61.0
17 | PARAMETER:ParameterNumber|NUMBER_NUGGET|nugget|None|None|0.0
18 | 1066.0,63.0
19 | PARAMETER:ParameterNumber|NUMBER_RANGE|range|None|None|0.0
20 | 1066.0,117.0
21 | PARAMETER:ParameterNumber|NUMBER_A|a|None|None|0.0
22 | 1068.0,169.0
23 | PARAMETER:ParameterNumber|NUMBER_B|b|None|None|0.0
24 | 1071.0,230.0
25 | PARAMETER:ParameterNumber|NUMBER_SIGMA|sigma|None|None|0.0
26 | 1074.0,294.0
27 | PARAMETER:ParameterNumber|NUMBER_MU|mu|None|None|0.0
28 | 1075.0,360.0
29 | VALUE:HARDCODEDPARAMVALUE_DIMENSIONS_0===0
30 | VALUE:HARDCODEDPARAMVALUE_HEIGHT_0===3000.0
31 | VALUE:HARDCODEDPARAMVALUE_WIDTH_0===3000.0
32 | ALGORITHM:gdalogr:rasterize
33 | 299.0,343.0
34 | None
35 | -1|VECTORLAYER_PARCELLE
36 | -1|TABLEFIELD_ATTRIBUT
37 | -1|HARDCODEDPARAMVALUE_DIMENSIONS_0
38 | -1|HARDCODEDPARAMVALUE_WIDTH_0
39 | -1|HARDCODEDPARAMVALUE_HEIGHT_0
40 | raster_parcelle|520.0,369.0
41 | ALGORITHM:r:ponderationselection
42 | 674.0,622.0
43 | None
44 | -1|VECTORLAYER_POINTS
45 | 0|OUTPUT
46 | -1|NUMBER_DISTMAX
47 | -1|NUMBER_DISTMIN
48 | -1|NUMBER_PUISSANCE
49 | -1|NUMBER_PONDRATION
50 | -1|NUMBER_NUGGET
51 | -1|NUMBER_RANGE
52 | -1|NUMBER_SIGMA
53 | -1|NUMBER_MU
54 | -1|NUMBER_A
55 | -1|NUMBER_B
56 | moyenne_ponderee|895.0,649.0
57 |
--------------------------------------------------------------------------------
/scripts/EquivalentNumField.py:
--------------------------------------------------------------------------------
##Create equivalent numerical field=name
##Vector=group
##layer=vector
##fieldname=field layer
##Equivalent_numerical_field_layer=output vector
##Equivalent_numerical_field_table=output table

# In addition to adding the Equivalent numerical field, this will create a reference table
# to relate the numbers with their original values.

from PyQt4.QtCore import QVariant
from qgis.core import (
    QgsFeatureRequest,
    QgsGeometry,
    QGis,
    QgsFeature,
    QgsField)
from processing.tools import vector
from processing.tools.vector import TableWriter
from processing.tools.vector import VectorWriter
from processing.core.GeoAlgorithmExecutionException import *

vlayer = processing.getObject(layer)
vprovider = vlayer.dataProvider()
fieldindex = vlayer.fieldNameIndex(fieldname)
fields = vprovider.fields()
fields.append(QgsField('NUM_FIELD', QVariant.Int))

# Writers for the augmented layer and the value<->number reference table
layer_writer = VectorWriter(Equivalent_numerical_field_layer, None, fields, vprovider.geometryType(), vlayer.crs())
table_writer = TableWriter(Equivalent_numerical_field_table, None, [fieldname, 'num'])

outFeat = QgsFeature()
classes = {}  # original field value -> assigned number (0-based, in order seen)
features = vector.features(vlayer)
nFeat = len(features)
# Idiom fixes vs the original: enumerate() instead of a manual counter,
# len(classes) instead of len(classes.keys()), and the dead
# 'inGeom = QgsGeometry()' initialization removed.
for nElement, feature in enumerate(features):
    progress.setPercentage(int(100 * nElement / nFeat))
    outFeat.setGeometry(feature.geometry())
    atMap = feature.attributes()
    clazz = atMap[fieldindex]
    if clazz not in classes:
        # First occurrence: assign the next number and record the pairing
        classes[clazz] = len(classes)
        table_writer.addRecord([clazz, classes[clazz]])
    atMap.append(classes[clazz])
    outFeat.setAttributes(atMap)
    layer_writer.addFeature(outFeat)

del layer_writer
del table_writer
--------------------------------------------------------------------------------
/rscripts/Selection_with_criterion_choice.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer=vector
##Criteres= Selection cp;bic;r2;adjr2;tous
##Methode=Selectionexhaustive;backward;forward;seqrep
##nbr_var = number 10
##Field1 = Field Layer
##Field2 = Field Layer
# Best-subset variable selection with leaps::regsubsets: Field1 is the
# response, Field2 is excluded, and the best model is reported according
# to the criterion chosen in 'Criteres' ('tous' reports all four).
library(leaps)
library(stats)
library(rpanel)
# Build the predictor matrix X from all layer attributes...
layer<-as.data.frame(Layer)
Z<-colnames(Layer@data)
z<-c()
X<-c()
for (j in (1:dim(layer)[2])){
X<-cbind(X,layer[,j])
z<-cbind(z,Z[j])
}
colnames(X)<-z
# ...then drop the response (Field1) and the excluded field (Field2)
a<-which(colnames(X)==Field1)
X<-X[,-a]
b<-which(colnames(X)==Field2)
X<-X[,-b]
Y<-Layer[[Field1]]
method=c("exhaustive", "backward", "forward", "seqrep")
methode<-method[Methode + 1]
test1<-regsubsets(Y~X,data=Layer, really.big=T,nbest=1,nvmax=nbr_var, intercept=F,method=methode)
model<-summary(test1)$which
result1<-cbind()
result2<-cbind()
result3<-cbind()
result4<-cbind()
result<-cbind()
critere<-c("cp","bic","r2","adjr2","tous")
critere<-critere[Criteres+1]
# Pick the best model according to the chosen criterion.
# BUGFIX: the original always used which.min on Cp here whatever
# 'Criteres' was, and minimised r2/adjr2 in the 'tous' branch;
# R-squared criteria must be MAXIMISED (Cp and BIC are minimised).
num_mod<-switch(critere,
cp = which.min(summary(test1)$cp),
bic = which.min(summary(test1)$bic),
r2 = which.max(summary(test1)$rsq),
adjr2 = which.max(summary(test1)$adjr2),
which.min(summary(test1)$cp))
beta<-model[num_mod,]
for(i in (1:length(beta))){
if (beta[i]==T) {
result<-c(result,beta[i])
}
}
if (critere=="tous"){
# Best model under each of the four criteria
num_mod1<-which.min(summary(test1)$cp)
num_mod2<-which.min(summary(test1)$bic)
num_mod3<-which.max(summary(test1)$rsq)
num_mod4<-which.max(summary(test1)$adjr2)
beta1<-model[num_mod1,]
beta2<-model[num_mod2,]
beta3<-model[num_mod3,]
beta4<-model[num_mod4,]
for(i in (1:length(beta1))){
if (beta1[i]==T) {
result1<-c(result1,beta1[i])
}
}
for(i in (1:length(beta2))){
if (beta2[i]==T) {
result2<-c(result2,beta2[i])
}
}
for(i in (1:length(beta3))){
if (beta3[i]==T) {
result3<-c(result3,beta3[i])
}
}
for(i in (1:length(beta4))){
if (beta4[i]==T) {
result4<-c(result4,beta4[i])
}
}
}
result
result1
result2
result3
result4
--------------------------------------------------------------------------------
/scripts/Buffer Contour.py:
--------------------------------------------------------------------------------
##Points=vector point
##Value_field=field Points
##Levels=string 10;20
##Buffer_parameter=number 60
##Max_buffer_size=number 500
##Group_by_field=boolean True
##Group_Field=field Points
##Contour=output vector

# Build pseudo-contour polygons: every point whose value lies below a level
# is buffered proportionally to its value (capped at Max_buffer_size) and
# all buffers of one level (per group) are dissolved into one multipolygon.

from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
from shapely.ops import cascaded_union
from shapely.wkb import loads
from shapely.wkt import dumps


levels = [float(x) for x in Levels.split(";")]
maxlevel = max(levels)
mbuf = Max_buffer_size

progress.setText("lvls {0}".format(levels))

nodeLayer = processing.getObject(Points)
nodePrder = nodeLayer.dataProvider()
n = nodeLayer.featureCount()
l = 0

pts = {}
bpr = Buffer_parameter

for feat in processing.features(nodeLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    # Points at or above the highest level can never contribute
    if feat[Value_field] < maxlevel:
        if Group_by_field: k = feat[Group_Field]
        else: k = 'a'

        if k not in pts: pts[k] = []

        pts[k].append((feat.geometry().asPoint(), feat[Value_field]))


if Group_by_field:
    fields = [QgsField(Group_Field, QVariant.String), QgsField('level', QVariant.Double)]
else:
    fields = [QgsField('level', QVariant.Double)]

writer = VectorWriter(Contour, None, fields, QGis.WKBMultiPolygon, nodePrder.crs())

feat = QgsFeature()

n = len(pts)
l = 0

for k,v in pts.iteritems():
    progress.setPercentage(int(100*l/n))
    l+=1

    if Group_by_field: attrs = [k, 0]
    else: attrs = [0]

    # BUGFIX: the level loop used to reuse 'l', clobbering the group
    # progress counter above; it now has its own variable.
    for lev in levels:

        if Group_by_field: attrs[1] = lev
        else: attrs[0] = lev
        feat.setAttributes(attrs)

        ptlist = [x for x in v if x[1] < lev]

        # BUGFIX: skip levels with no qualifying points -
        # cascaded_union([]) raises on an empty sequence.
        if not ptlist:
            continue

        polygons = [loads(QgsGeometry.fromPoint(p).buffer(min(mbuf, d * bpr), 10).asWkb())
                    for p,d in ptlist]

        feat.setGeometry(QgsGeometry.fromWkt(dumps(cascaded_union(polygons))))
        writer.addFeature(feat)

del writer
--------------------------------------------------------------------------------
/scripts/CSV_RGB_or_HEX_to_categorized_style.py.help:
--------------------------------------------------------------------------------
1 | {"Value_field": "EN : Fields vector layer on which the style will be applied.\n\nFR : Champs de la couche vecteur sur lequel on va appliquer le style.", "ALG_DESC": "EN : The script generates a categorized style from a CSV file including color information (RGB or HEX).\nMore informations : http://www.gis-blog.fr/2015/03/08/generer-un-style-categorise-sur-qgis-a-partir-dun-fichier-csv/\n\nFR : Le script permet de g\u00e9n\u00e9rer un style cat\u00e9goris\u00e9 \u00e0 partir d'un fichier CSV dans lequel on trouve des informations de couleur (RGB ou Hexad\u00e9cimal).\nPlus d'informations : http://www.gis-blog.fr/2015/03/08/generer-un-style-categorise-sur-qgis-a-partir-dun-fichier-csv/", "Outline": "EN : Check to add a black outline.\n\nFR : Cocher pour ajouter une bordure noir.", "ALG_CREATOR": "Florian Boret", "Vector_layer": "EN : Vector layer on which you want to apply the style.\n\nFR : Couche vecteur sur laquelle on souhaite appliquer le style.", "CSV_Encoding": "EN : Encoding of the CSV file.\\nExemple : latin1, utf-8,... 
\n\nFR : Encodage du fichier CSV.\\nExemple : latin1, utf-8,...", "Outline_width": "EN : Outline thickness.\n\nFR : Epaisseur de la bordure.", "CSV_file_with_semicolon_delimiter": "EN : Delimited file in CSV format (delimiter : semicolon).\n\nFR : Fichier d\u00e9limit\u00e9 au format CSV (d\u00e9limiteur : point-virgule).\nExemple : Nomenclature Corine Land Cover", "Column_value": "EN : Define the value column in the CSV file.\nExemple : Column 0\n\nFR : D\u00e9finir la valeur de la colonne dans le fichier CSV.\nExemple : Colonne 0", "Column_label": "EN : Define the label column in the CSV file.\nExemple : Column 1\n\nFR : D\u00e9finir la colonne de l'\u00e9tiquette dans le fichier CSV.\nExemple : Colonne 1", "ALG_VERSION": "1.1", "ALG_HELP_CREATOR": "Florian Boret", "Transparency": "EN : Transparency between 0 and 1.\n0 : Transparency\n1 : No transparency\n\nFR : Transparence comprise entre 0 et 1.\n0 : Transparence\n1 : Pas de transparence", "Column_RGB_or_HEX": "EN : Define the \"RGB or HEX\" column in the CSV file.\nExemple : Column 3\n\nFR : D\u00e9finir la colonne \"RGB or HEX\" dans le fichier CSV.\nExemple : Colonne 3", "Save_layer_style_as_default": ""}
--------------------------------------------------------------------------------
/rscripts/Douglas-Peucker_with_choice.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##distance= number 200
4 | ##numero= number 11
5 | ##Output= output vector
6 | library(rgdal)
7 | library('maptools')
8 | library(maptools)
9 | library(rgeos)
10 | library(geosphere)
11 | library(stats)
# Return c(index, dmax): the index of the interior point of PointList that
# lies farthest from the chord joining its first and last points, together
# with that great-circle distance (via distHaversine). Index 0 / distance 0
# when no interior point exceeds zero.
distance_max <- function(PointList) {
  n_pts <- dim(PointList)[1]
  # chord from first to last vertex; constant across the scan
  chord <- rbind(PointList[1, ], PointList[n_pts, ])
  best_i <- 0
  best_d <- 0
  for (idx in (2:(n_pts - 1))) {
    dd <- dist2Line(PointList[idx, ], chord, distfun = distHaversine)
    if (dd[1] > best_d) {
      best_d <- dd[1]
      best_i <- idx
    }
  }
  return(c(best_i, best_d))
}
28 |
# Iterative Douglas-Peucker simplification of a ring of coordinates.
#   PointList: 2-column coordinate matrix (lon/lat); epsilon: tolerance in metres.
# Returns the untouched matrix when the ring is already below the tolerance,
# otherwise a list of simplified 2-point segments.
# NOTE(review): the loop header of this function was corrupted in the
# repository dump ("while (k= epsilon){" with lines missing); the control
# flow below is a reconstruction of the standard iterative algorithm -
# verify against repository history.
DouglasPeucker = function(PointList, epsilon){
  n <- dim(PointList)[1]
  d <- distance_max(PointList)
  if (d[2] < epsilon){
    Result <- PointList
  }
  else if (d[2] >= epsilon){
    # split at the farthest point and keep refining each piece
    X <- PointList[1:d[1],]
    Y <- PointList[d[1]:n,]
    ResultList <- list(X,Y)
    k = 1
    while (k <= length(ResultList)){
      d <- distance_max(ResultList[[k]])
      if (d[2] >= epsilon){
        # piece still too coarse: split it again, re-examine from the same slot
        m <- dim(ResultList[[k]])[1]
        X <- ResultList[[k]][1:d[1],]
        Y <- ResultList[[k]][d[1]:m,]
        ResultList[[k]] <- NULL
        ResultList <- c(ResultList,list(X,Y))
        k = k
      } else {
        # piece within tolerance: collapse it to its two endpoints, move on
        m <- dim(ResultList[[k]])[1]
        ResultList[[k]] <- rbind(ResultList[[k]][1,],ResultList[[k]][m,])
        k = k+1
      }
    }
    Result <- c(ResultList)
  }
  return (Result)
}
# Simplify every ring of polygon number `numero` of Layer with the
# Douglas-Peucker routine above and rebuild the result as a
# SpatialLinesDataFrame assigned to Output.
p<-length(Layer@polygons[[numero]]@Polygons)
Resultats<-c()
Coordi<-c()
for (w in (1:p)){
  Coords<-c()
  # coordinate matrix of ring w of the selected polygon
  points<-Layer@polygons[[numero]]@Polygons[[w]]@coords
  result<-DouglasPeucker(PointList=points, epsilon=distance)
  if (class(result)=='matrix'){
    # a plain matrix means the ring was already below the tolerance:
    # contribute nothing to the output
    Resultats<-Resultats
    Coordi<-Coordi
  } else{
    Results<-rep(list(0),length(result))
    Result<-rep(list(0),length(result))
    # collect the first vertex of each simplified segment; these become the
    # attribute rows of the output layer
    for (t in (1:(length(result)))){
      Coords<-rbind(Coords,result[[t]][1,])
    }
    # wrap each segment into sp Line/Lines objects with a unique ID
    for (k in (1:(length(result)))){
      Result[[k]]<-Line(result[[k]])
      Results[[k]]<-Lines(list(Result[[k]]),ID=paste("lignes",k,w))
    }
    Resultats<-c(Resultats,Results)
    Coordi<-rbind(Coordi,Coords)
  }
}
X<-SpatialLines(Resultats)
# one attribute row per line, taken from the collected first vertices
XX<-SpatialLinesDataFrame(X, data=as.data.frame(Coordi[1:length(X),]), match.ID = F)
Output=XX
--------------------------------------------------------------------------------
/scripts/Unique_values_count.py:
--------------------------------------------------------------------------------
##Raster=group
##input=raster
##round_values_to_ndigits=number 3
##Sort_by_count=boolean False
##Highest_value_on_top=boolean True
##output_file=output html

from osgeo import gdal
import sys
import math
import operator

# Count the occurrences of every (optionally rounded) cell value, per band,
# and write the result to the output file as one HTML table.

# load raster
gdalData = gdal.Open(str(input))

# get width and height of the raster
xsize = gdalData.RasterXSize
ysize = gdalData.RasterYSize

# get number of bands
bands = gdalData.RasterCount

# start writing html output
# FIX: open in 'w' (was 'a') so a re-run does not append to a stale report.
# NOTE(review): the original markup literals were mangled in extraction and
# left the script with unterminated strings; the table markup below is a
# reconstruction of the evident intent.
f = open(output_file, 'w')
f.write('<html><body><table>\n<tr><th>Band Number</th><th>Cell Value</th><th>Count</th></tr>\n')

# process the raster
for i in xrange(1, bands + 1):
    progress.setText("processing band " + str(i) + " of " + str(bands))
    band_i = gdalData.GetRasterBand(i)
    raster = band_i.ReadAsArray()

    # dictionary: cell value -> occurrence count for this band
    count = {}

    # count unique values for the given band
    for col in range(xsize):
        if col % 10 == 0: progress.setPercentage(int(100*col/xsize))
        for row in range(ysize):
            cell_value = raster[row, col]

            # NoData cells read back as NaN; report them under one 'Null' key
            if math.isnan(cell_value):
                cell_value = 'Null'

            # round floats if needed (0 digits disables rounding)
            elif round_values_to_ndigits:
                try:
                    cell_value = round(cell_value, int(round_values_to_ndigits))
                except:
                    cell_value = round(cell_value)

            # add cell_value to dictionary
            try:
                count[cell_value] += 1
            except KeyError:
                count[cell_value] = 1

    # decide whether to sort by the count-column or the value-column
    if Sort_by_count:
        sortcount = sorted(count.items(), key=operator.itemgetter(1), reverse=Highest_value_on_top)
    else:
        sortcount = sorted(count.items(), key=operator.itemgetter(0), reverse=Highest_value_on_top)

    # write one table row per distinct value of this band
    for value, occurrences in sortcount:
        f.write('<tr><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (i, value, occurrences))

f.write('</table></body></html>\n')
# FIX: the original 'f.close' lacked the call parentheses, so the file was
# never explicitly closed.
f.close()
--------------------------------------------------------------------------------
/scripts/realcentroid_algorithm.py:
--------------------------------------------------------------------------------
##Real centroid=name
##Vector=group
##poly=vector
##output=output vector
from qgis.core import *
from qgis.utils import *
from processing.tools.vector import VectorWriter
from math import sqrt

# For every input polygon, output a point guaranteed to fall inside it:
# pointOnSurface() on QGIS >= 2.4, otherwise the centroid, pushed onto the
# polygon along a horizontal line whenever it falls outside.
# (A duplicated 'from qgis.core import *' line was removed.)

inputLayer = processing.getObject(poly)
features = processing.features(inputLayer)
fields = inputLayer.pendingFields().toList()
outputLayer = VectorWriter(output, None, fields, QGis.WKBPoint,
                           inputLayer.crs())
outFeat = QgsFeature()
for inFeat in features:
    inGeom = inFeat.geometry()
    if inGeom.isMultipart():
        # find largest part in case of multipart
        maxarea = 0
        tmpGeom = QgsGeometry()
        for part in inGeom.asGeometryCollection():
            area = part.area()
            if area > maxarea:
                tmpGeom = part
                maxarea = area
        inGeom = tmpGeom
    atMap = inFeat.attributes()
    # FIX: the original compared version *strings* ("2.10" < "2.4"
    # lexicographically), misclassifying QGIS >= 2.10; compare the numeric
    # build number instead.
    if QGis.QGIS_VERSION_INT >= 20400:
        outGeom = inGeom.pointOnSurface()
    else:
        outGeom = inGeom.centroid()
    if not inGeom.contains(outGeom):
        # weight point outside the polygon: intersect a horizontal line
        # through the weight point with the polygon and take a midpoint
        rect = inGeom.boundingBox()
        horiz = QgsGeometry.fromPolyline([QgsPoint(rect.xMinimum(), outGeom.asPoint()[1]), QgsPoint(rect.xMaximum(), outGeom.asPoint()[1])])
        line = horiz.intersection(inGeom)
        if line.isMultipart():
            # keep the midpoint of the longest intersection segment
            mline = line.asMultiPolyline()
            l = 0
            for i in range(len(mline)):
                d = sqrt((mline[i][0][0] - mline[i][1][0])**2 + (mline[i][0][1] - mline[i][1][1])**2)
                if d > l:
                    l = d
                    xMid = (mline[i][0][0] + mline[i][1][0]) / 2.0
                    yMid = (mline[i][0][1] + mline[i][1][1]) / 2.0
        else:
            xMid = (line.vertexAt(0).x() + line.vertexAt(1).x()) / 2.0
            yMid = (line.vertexAt(0).y() + line.vertexAt(1).y()) / 2.0
        outGeom = QgsGeometry.fromPoint(QgsPoint(xMid, yMid))
    outFeat.setAttributes(atMap)
    outFeat.setGeometry(outGeom)
    outputLayer.addFeature(outFeat)
--------------------------------------------------------------------------------
/scripts/predominant_category.py:
--------------------------------------------------------------------------------
##Assing predominant category=name
##Polygons=group
##layera=vector polygon
##layerb=vector polygon
##category=field layerb
##output=output vector

from PyQt4.QtCore import QVariant
from qgis.core import (
    QgsFeatureRequest,
    QgsGeometry,
    QGis,
    QgsFeature,
    QgsField)
from processing.tools import vector
from processing.tools.vector import VectorWriter
from processing.core.GeoAlgorithmExecutionException import *

# For every polygon of layera, find the overlapping polygon of layerb with
# the largest intersection area and copy its `category` value into a new
# majority ('MAJ') attribute of the output copy of layera.

layera = processing.getObject(layera)
layerb = processing.getObject(layerb)

providera = layera.dataProvider()
fieldsa = providera.fields()
providerb = layerb.dataProvider()
fieldsb = providerb.fields()
fieldIdx = layerb.fieldNameIndex(category)
# output schema: all of layera's fields plus one majority-category field,
# typed like the source category field
fields =[]
fields.extend(fieldsa)
fields.append(QgsField(vector.createUniqueFieldName('MAJ', fieldsa), fieldsb.field(category).type()))
writer = VectorWriter(output, None, fields, QGis.WKBMultiPolygon, layera.crs())
outFeat = QgsFeature()
# spatial index over layerb for fast bounding-box candidate lookup
index = vector.spatialindex(layerb)
featuresa = list(layera.getFeatures())
nfeat = len(featuresa)
# progress share contributed by one feature of layera
nprogress = 1 / float(nfeat) * 100
try:
    for n, feat in enumerate(featuresa):
        geom = feat.geometry()
        attrs = feat.attributes()
        # candidate ids whose bbox intersects this polygon's bbox
        intersects = index.intersects(geom.boundingBox())
        maxArea = -1
        cat = None
        nintersects = len(intersects)
        for m, i in enumerate(intersects):
            progress.setPercentage((nprogress * n) + (nprogress * (m / float(nintersects))))
            request = QgsFeatureRequest().setFilterFid(i)
            featb = layerb.getFeatures(request).next()
            tmpGeom = featb.geometry()
            if geom.intersects(tmpGeom):
                intGeom = geom.intersection(tmpGeom)
                if not intGeom:
                    # degenerate/empty intersection: ignore this candidate
                    continue
                area = intGeom.area()
                if area > maxArea:
                    # remember the category of the largest overlap so far
                    maxArea = area
                    cat = featb.attributes()[fieldIdx]
        outFeat.setGeometry(geom)
        attrs.append(cat)
        outFeat.setAttributes(attrs)
        writer.addFeature(outFeat)

except Exception, e:
    raise GeoAlgorithmExecutionException(e.args[0])

del writer
--------------------------------------------------------------------------------
/scripts/make_landsat8_footprints.py:
--------------------------------------------------------------------------------
##url=string http://landsat-pds.s3.amazonaws.com/scene_list.gz
##output_layer=output vector

from qgis.core import *
from PyQt4.QtCore import *
import os, sys
import urllib2
import gzip

# Download the AWS Landsat-8 scene list, unpack it next to the output layer
# and write one rectangular footprint polygon (plus scene metadata) per
# scene into an ESRI shapefile.
# (The original's unused 'shpname' local was removed.)

folder = os.path.dirname(output_layer)

gzfilename = os.path.join(folder, os.path.basename(url))
filename = gzfilename.replace(".gz", "")

# fetch the gzipped CSV
req = urllib2.urlopen(url)
with open(gzfilename, 'wb') as fp:
    fp.write(req.read())

# decompress it; context managers close both handles even on error
# (the original leaked them when a write failed)
with gzip.open(gzfilename, 'rb') as inF:
    with open(filename, 'wb') as outF:
        outF.write(inF.read())

fields = QgsFields()
fields.append(QgsField("ENTITY_ID", QVariant.String))
fields.append(QgsField("ACQ_DATE", QVariant.String))
fields.append(QgsField("CLOUDCOVER", QVariant.Double))
fields.append(QgsField("PROC_LEVEL", QVariant.String))
fields.append(QgsField("PATH", QVariant.Int))
fields.append(QgsField("ROW", QVariant.Int))
fields.append(QgsField("MIN_LAT", QVariant.Double))
fields.append(QgsField("MIN_LON", QVariant.Double))
fields.append(QgsField("MAX_LAT", QVariant.Double))
fields.append(QgsField("MAX_LON", QVariant.Double))
fields.append(QgsField("DATA_URL", QVariant.String))

crs = QgsCoordinateReferenceSystem()
crs.createFromString('GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]')
writer = QgsVectorFileWriter(output_layer, "CP1250", fields, QGis.WKBPolygon, crs, "ESRI Shapefile")

with open(filename, "r") as f:
    header = f.readline()  # skip the CSV header line

    for line in f:
        line = line.strip().split(",")
        min_lat = float(line[6])
        min_lon = float(line[7])
        max_lat = float(line[8])
        max_lon = float(line[9])

        # scenes crossing the antimeridian: shift the west edge east so the
        # footprint does not wrap around the globe
        if abs(max_lon - min_lon) > 180:
            min_lon = min_lon + 360

        feat = QgsFeature()
        gPolygon = QgsGeometry.fromPolygon([[QgsPoint(min_lon,max_lat), QgsPoint(max_lon,max_lat), QgsPoint(max_lon,min_lat), QgsPoint(min_lon,min_lat)]])
        feat.setGeometry(gPolygon)

        feat.setAttributes([line[0], line[1], float(line[2]), line[3], int(line[4]), int(line[5]), min_lat, min_lon, max_lat, max_lon, line[10]])

        writer.addFeature(feat)
del writer

# remove the temporary downloads
os.unlink(gzfilename)
os.unlink(filename)
--------------------------------------------------------------------------------
/scripts/CSV_R-G-B_to_categorized_style.py.help:
--------------------------------------------------------------------------------
1 | {"Value_field": "EN : Fields vector layer on which the style will be applied.\n\nFR : Champs de la couche vecteur sur lequel on va appliquer le style.", "ALG_DESC": "EN : The script generates a categorized style from a CSV file including color information (R-G-B).\nMore informations : http://www.gis-blog.fr/2015/03/08/generer-un-style-categorise-sur-qgis-a-partir-dun-fichier-csv/\n\nFR : Le script permet de g\u00e9n\u00e9rer un style cat\u00e9goris\u00e9 \u00e0 partir d'un fichier CSV dans lequel on trouve des informations de couleur (R-G-B).\nPlus d'informations : http://www.gis-blog.fr/2015/03/08/generer-un-style-categorise-sur-qgis-a-partir-dun-fichier-csv/", "Outline": "EN : Check to add a black outline.\n\nFR : Cocher pour ajouter une bordure noir.", "ALG_CREATOR": "Florian Boret", "Vector_layer": "EN : Vector layer on which you want to apply the style.\n\nFR : Couche vecteur sur laquelle on souhaite appliquer le style.", "CSV_Encoding": "EN : Encoding of the CSV file.\nExemple : latin1, utf-8,...\n\nFR : Encodage du fichier CSV.\nExemple : latin1, utf-8,...", "Outline_width": "EN : Outline thickness.\n\nFR : Epaisseur de la bordure.", "Column_green": "EN : Define the \"green\" column in the CSV file.\nExemple : Column 3\n\nFR : D\u00e9finir la colonne \"vert\" dans le fichier CSV.\nExemple : Colonne 3", "CSV_file_with_semicolon_delimiter": "EN : Delimited file in CSV format (delimiter : semicolon).\n\nFR : Fichier d\u00e9limit\u00e9 au format CSV (d\u00e9limiteur : point-virgule).\nExemple : Nomenclature Corine Land Cover", "Column_value": "EN : Define the value column in the CSV file.\nExemple : Column 0\n\nFR : D\u00e9finir la valeur de la colonne dans le fichier CSV.\nExemple : Colonne 0", "Column_label": "EN : Define the label column in the CSV file.\nExemple : Column 1\n\nFR : D\u00e9finir la colonne de l'\u00e9tiquette dans le fichier CSV.\nExemple : Colonne 1", "Column_red": "EN : Define the \"red\" column in the CSV file\nExemple : Column 2\n\nFR 
: D\u00e9finir la colonne du \"rouge\" dans le fichier CSV.\nExemple : Colonne 2", "Column_blue": "EN : Define the \"blue\" column in the CSV file.\nExemple : Column 4\n\nFR : D\u00e9finir la colonne \"bleu\" dans le fichier CSV.\nExemple : Colonne 4", "ALG_HELP_CREATOR": "Florian Boret", "Transparency": "EN : Transparency between 0 and 1.\n0: Transparency\n1: No transparency\n\nFR : Transparence comprise entre 0 et 1.\n0 : Transparence\n1 : Pas de transparence", "ALG_VERSION": "1.1", "Save_layer_style_as_default": "EN : Check to create an QML file associated with the vector layer.\n\nFR : Cocher pour cr\u00e9er un fichier QML associ\u00e9 \u00e0 la couche vecteur."}
--------------------------------------------------------------------------------
/scripts/Generate_Unique_values_renderer.py:
--------------------------------------------------------------------------------
##Raster=group
##Generate unique values style=name
##Raster_to_extract_unique_values=raster
##round_values_to_ndigits=number 0

from osgeo import gdal
from random import randint
import math
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.utils import iface

# Collect every distinct (optionally rounded) cell value of the raster and
# apply a single-band pseudo-colour renderer with one exact-match colour
# ramp entry per value, cycling through six preset colours.

# Rename verbose input vars
input = Raster_to_extract_unique_values
rdig = round_values_to_ndigits

# Initialize unique values list
sort_values = []
# create set for unique values list
cell_values = set()

# load raster
gdalData = gdal.Open(str(input))

# get width and height of the raster
xsize = gdalData.RasterXSize
ysize = gdalData.RasterYSize

# get number of bands
bands = gdalData.RasterCount

# process the raster
for i in xrange(1, bands + 1):
    progress.setText("processing band " + str(i) + " of " + str(bands))
    band_i = gdalData.GetRasterBand(i)
    raster = band_i.ReadAsArray() # This loads the entire raster into memory!
    # collect unique values for the given band
    for col in range( xsize ):
        if col % 10 == 0: progress.setPercentage(int(100*col/xsize))
        for row in range( ysize ):
            cell_value = raster[row, col]
            # check if cell_value is NaN (NoData) - don't add if it is
            if not math.isnan(cell_value):
                # round floats if needed (rdig == 0 disables rounding)
                if rdig:
                    try:
                        cell_value = round(cell_value, int(rdig))
                    except:
                        cell_value = round(cell_value)
                # Add to the unique values set
                cell_values.add(cell_value)

del(gdalData)

# ramp entries must be in ascending value order
sort_values = sorted(cell_values)

# Now load the layer and apply styling
layer = processing.getObjectFromUri(input)

qCRS = QgsColorRampShader()

# Fixed six-colour palette, cycled over the unique values
colList = ['#ff0000','#ffff00','#0000ff','#00ffff','#00ff00','#ff00ff']

lst = []
for i,val in enumerate(sort_values):
    lst.append(QgsColorRampShader.ColorRampItem(val,QColor(colList[i % 6])))

qCRS.setColorRampItemList(lst)
# EXACT: a colour is applied only to cells matching a ramp entry exactly
qCRS.setColorRampType(QgsColorRampShader.EXACT)

shader = QgsRasterShader()
shader.setRasterShaderFunction(qCRS)

renderer = QgsSingleBandPseudoColorRenderer(layer.dataProvider(), layer.type(), shader)
layer.setRenderer(renderer)
layer.triggerRepaint()
--------------------------------------------------------------------------------
/scripts/Contour.py:
--------------------------------------------------------------------------------
##Points=vector
##Value_field=field Points
##Levels=string 0;10;20
##Group_by_field=boolean True
##Group_Field=field Points
##Results=output vector

from shapely.geometry import MultiPolygon
from qgis.core import *
from PyQt4.QtCore import *
from processing.tools.vector import VectorWriter
import numpy as np
import matplotlib.pyplot as plt

# Build filled contour polygons (one multipolygon per contour band, and per
# group when Group_by_field is set) from a point layer's values, using
# matplotlib's tricontourf on a triangulation of the points.

levels = [float(x) for x in Levels.split(";")]

nodeLayer = processing.getObject(Points)
nodePrder = nodeLayer.dataProvider()
n = nodeLayer.featureCount()
l = 0

# group key -> {'x': [...], 'y': [...], 'v': [...]} coordinate/value lists
pts = {}


for feat in processing.features(nodeLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    if Group_by_field:
        k = feat[Group_Field]
    else:
        k = 'a'  # single implicit group

    geom = feat.geometry().asPoint()

    pts.setdefault(k, {'x':[], 'y':[], 'v':[]})

    pts[k]['x'].append(geom.x())
    pts[k]['y'].append(geom.y())
    pts[k]['v'].append(feat[Value_field])


# output schema: contour band bounds, preceded by the group key if any
if Group_by_field:
    fields = [QgsField(Group_Field, QVariant.String),
              QgsField('min', QVariant.Double),
              QgsField('max', QVariant.Double)]
else:
    fields = [QgsField('min', QVariant.Double),
              QgsField('max', QVariant.Double)]

writer = VectorWriter(Results, None, fields, QGis.WKBMultiPolygon, nodePrder.crs())

feat = QgsFeature()

n = len(pts)
l = 0

for k in pts.keys():
    progress.setPercentage(int(100*l/n))
    l+=1

    if Group_by_field:
        attrs = [k]
    else:
        attrs = []

    # convert each sublist in a numpy array

    x = np.array(pts[k]['x'])
    y = np.array(pts[k]['y'])
    v = np.array(pts[k]['v'])

    # one PathCollection per band between consecutive levels
    cs = plt.tricontourf(x, y, v, levels, extend='neither')

    for i, polygon in enumerate(cs.collections):

        mpoly = []

        for path in polygon.get_paths():
            path.should_simplify = False
            # first ring is the exterior, any further rings are holes
            poly = path.to_polygons()
            exterior = []
            holes = []

            if len(poly) > 0:
                exterior = poly[0]
                if len(poly) > 1: # There's some holes
                    holes = [h for h in poly[1:] if len(h) > 2]

            mpoly.append([exterior, holes])

        if len(mpoly) > 0:
            if Group_by_field:
                attrs = [k, levels[i], levels[i+1]]
            else:
                attrs = [levels[i], levels[i+1]]
            feat.setAttributes(attrs)
            # round-trip through WKT to convert shapely -> QGIS geometry
            feat.setGeometry(QgsGeometry.fromWkt(MultiPolygon(mpoly).to_wkt()))
            writer.addFeature(feat)

del writer
--------------------------------------------------------------------------------
/scripts/Points on crossing lines.py:
--------------------------------------------------------------------------------
1 | ##Lines=vector
2 | ##Point_grouping_buffer=number 10
3 | ##Results=output vector
4 |
5 | from qgis.core import *
6 | from PyQt4.QtCore import *
7 | from processing.tools.vector import VectorWriter
8 |
9 |
def buffRect(point, b):
    """Return the axis-aligned square of half-width *b* centred on *point*."""
    cx, cy = point.x(), point.y()
    return QgsRectangle(cx - b, cy - b, cx + b, cy + b)
14 |
buff = Point_grouping_buffer
cutLayer = processing.getObject(Lines)
cutPrder = cutLayer.dataProvider()
n = cutLayer.featureCount()
l = 0

# build spatial index of lines

index = QgsSpatialIndex()
geom_ix = {}  # feature id -> geometry WKB, so geometries can be rebuilt later
progress.setText("Index lines...")

for feat in processing.features(cutLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    index.insertFeature(feat)
    geom_ix[feat.id()] = feat.geometry().asWkb()



# find points on crossing lines
progress.setText("Find crossing points...")

l = 0
i = 0  # running id assigned to each intersection point
ptindex = QgsSpatialIndex()
pt_ix = {}  # point id -> QgsPoint of the intersection
secgeom = QgsGeometry()
featgeom = QgsGeometry()
resfeat = QgsFeature()

for feat in processing.features(cutLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    # candidate lines whose bounding boxes overlap this one
    near = index.intersects(feat.geometry().boundingBox())

    for f in [x for x in near if x != feat.id()]: # exclude self

        featgeom = feat.geometry()
        secgeom.fromWkb(geom_ix[f])

        if featgeom.crosses(secgeom):
            # a crossing may yield several points; collect them all
            crosspts = feat.geometry().intersection(secgeom).asGeometryCollection()

            for pt in crosspts:
                i += 1

                # index point
                resfeat.setGeometry(pt)
                resfeat.setFeatureId(i)
                ptindex.insertFeature(resfeat)
                pt_ix[i] = pt.asPoint()


feat = QgsFeature()
fields = [QgsField("nodeid", QVariant.Int)]
writer = VectorWriter(Results, None, fields, QGis.WKBPoint, cutPrder.crs())


# only save unique points
progress.setText("Save unique points...")
n = len(pt_ix)
featgeom = QgsGeometry()


# greedy de-duplication: write one point, then drop every indexed point
# within the grouping buffer around it, until none remain
while len(pt_ix) != 0:
    progress.setPercentage(int(100*(n-len(pt_ix))/n))

    i = pt_ix.keys()[0]

    # write point

    attrs = [i]
    feat.setGeometry(featgeom.fromPoint(pt_ix[i]))
    feat.setAttributes(attrs)
    writer.addFeature(feat)

    # delete close points
    near = ptindex.intersects(buffRect(pt_ix[i], buff))

    for pt in near:
        feat.setFeatureId(pt)
        feat.setGeometry(featgeom.fromPoint(pt_ix[pt]))
        deleted = ptindex.deleteFeature(feat)
        del pt_ix[pt]

del writer
--------------------------------------------------------------------------------
/rscripts/list.txt:
--------------------------------------------------------------------------------
1 | A-star.rsx,1,A-star
2 | ACP_cercle.rsx,1,ACP cercle
3 | ACP_contribution.rsx,1,ACP contribution
4 | ACP_individus.rsx,1,ACP individus
5 | ACP_var.rsx,1,ACP var
6 | AFC.rsx,1,AFC
7 | AFDM.rsx,1,AFDM
8 | ANOVA.rsx,1,ANOVA
9 | Advanced_raster_histogram.rsx,1,Advanced raster histogram
10 | Alpha_shape.rsx,1,Alpha shape
11 | Autocor_spatiale.rsx,1,Autocor spatiale
12 | CAH.rsx,1,CAH
13 | CART.rsx,1,CART
14 | Close_neighbor.rsx,1,Close neighbor
15 | Density_curve.rsx,1,Density curve
16 | Distance.rsx,1,Distance
17 | Douglas-Peucker.rsx,1,Douglas-Peucker
18 | Douglas-Peucker_with_choice.rsx,1,Douglas-Peucker with choice
19 | Extract_points_from_line.rsx,1,Extract points from line
20 | F_function.rsx,1,F function
21 | Frequency_table.rsx,1,Frequency table
22 | G_function.rsx,1,G function
23 | Histogram.rsx,1,Histogram
24 | Inverse_Distance_Weigthing.rsx,1,Inverse Distance Weigthing
25 | Inverse_Distance_Weigthing_with_method_selection.rsx,1,Inverse Distance Weigthing with method selection
26 | Kernel_density_estimation.rsx,1,Kernel density estimation
27 | Kolmogrov-Smirnov_test.rsx,1,Kolmogrov-Smirnov test
28 | Kriging.rsx,1,Kriging
29 | Kriging_with_model_selection.rsx,1,Kriging with model selection
30 | Minimum_convex_polygon.rsx,1,Minimum convex polygon
31 | Monte-Carlo_spatial_randomness.rsx,1,Monte-Carlo spatial randomness
32 | Multiple_Regression.rsx,1,Multiple Regression
33 | Ordinary Kriging.rsx,1.0,Ordinary Kriging
34 | Polygone.rsx,1,Polygone
35 | Quadrat_analysis.rsx,1,Quadrat analysis
36 | Random_sampling_grid.rsx,1,Random sampling grid
37 | Raster_histogram.rsx,1,Raster histogram
38 | Regular_sampling_grid.rsx,1,Regular sampling grid
39 | Relative_distribution_(distance_covariate).rsx,1,Relative distribution (distance covariate)
40 | Relative_distribution_(raster_covariate).rsx,1,Relative distribution (raster covariate)
41 | Ripley_-_Rasson_spatial_domain.rsx,1,Ripley - Rasson spatial domain
42 | Selection_Cp.rsx,1,Selection Cp
43 | Selection_with_Bayesian_Information_Criterion.rsx,1,Selection with Bayesian Information Criterion
44 | Selection_with_criterion_choice.rsx,1,Selection with criterion choice
45 | Selection_with_r2.rsx,1,Selection with r2
46 | Selection_with_r2_adjusted.rsx,1,Selection with r2 adjusted
47 | Simple_Linear_Regression.rsx,1,Simple Linear Regression
48 | Summarize_by_field.rsx,1,Summarize by field
49 | Summarize_by_two_fields.rsx,1,Summarize by two fields
50 | Summary_statistics.rsx,1,Summary statistics
51 | Tobler.rsx,1,Tobler
52 | Variogram Modelling.rsx,0.7,Variogram Modelling
53 | frequency_plot.rsx,0.1,frequency plot
54 | ggplot_scatterplot.rsx,1.0,ggplot scatterplot
55 | qqplot.rsx,1.0,qqplot
56 | scatterplot_log.rsx,1.0,scatterplot log
57 | scatterplot_regressione.rsx,1,scatterplot regressione
58 | scatterplot_types.rsx,1.0,scatterplot types
59 | ternaryPlots.rsx,0.1,ternaryPlots
--------------------------------------------------------------------------------
/scripts/Cadastre_FR_WMS.py:
--------------------------------------------------------------------------------
##Cadastre FR=group
##Cadastre FR - WMS - Add a cadastral map=name

##Vector_layer_of_communes=vector
##INSEE_code=field Vector_layer_of_communes
##Commune_name=field Vector_layer_of_communes
##EPSG_code=string 2154

from qgis.core import QgsRasterLayer,QgsMapLayerRegistry
from qgis.utils import iface,QgsMessageBar

# Add one cadastral WMS raster layer per distinct (INSEE code, commune name)
# pair of the input commune layer.

layer = processing.getObject(Vector_layer_of_communes)

# coordinate systems accepted by the cadastre WMS service.
# FIX: the original chained 26 '==' comparisons and contained the typo
# "' 32631'" (leading space), so EPSG:32631 could never be accepted.
VALID_EPSG_CODES = ('2154', '3942', '3943', '3944', '3945', '3946', '3947',
                    '3948', '3949', '3950', '32630', '32631', '32632',
                    '3857', '4326', '4258', '32620', '2970', '2972', '2973',
                    '2975', '32622', '32740', '32738', '4471', '32621')

if EPSG_code in VALID_EPSG_CODES:
    progress.setText(u'EPSG code : ' + EPSG_code)

    tab = []

    for f in layer.getFeatures():

        col_select = str(f[INSEE_code]), f[Commune_name]
        tab.append(col_select)

    # remove duplicate pairs, then sort for a stable layer order
    Lt = list(set(tab))
    Lt.sort()

    for c_insee, n_couche in Lt :

        # requested layers: AMORCES_CAD,LIEUDIT,CP.CadastralParcel,SUBFISCAL,CLOTURE,DETAIL_TOPO,HYDRO,VOIE_COMMUNICATION,BU.Building,BORNE_REPERE
        urlWithParams = "url=http://inspire.cadastre.gouv.fr/scpc/"+c_insee+".wms?contextualWMSLegend=0&crs=EPSG:"+EPSG_code+"&dpiMode=7&featureCount=10&format=image/png&layers=AMORCES_CAD&layers=LIEUDIT&layers=CP.CadastralParcel&layers=SUBFISCAL&layers=CLOTURE&layers=DETAIL_TOPO&layers=HYDRO&layers=VOIE_COMMUNICATION&layers=BU.Building&layers=BORNE_REPERE&styles=&styles=&styles=&styles=&styles=&styles=&styles=&styles=&styles=&styles=&maxHeight=1024&maxWidth=1280"

        rlayer = QgsRasterLayer(urlWithParams, 'Cadastre_'+n_couche+'_'+c_insee, 'wms')

        progress.setText(u'Commune name : ' + n_couche+' - '+c_insee)
        progress.setText(u'Validity of WMS : %s' % rlayer.isValid())

        QgsMapLayerRegistry.instance().addMapLayer(rlayer)

        if rlayer.isValid():
            iface.messageBar().pushMessage("Information :", "Adding a cadastral map : "+n_couche, QgsMessageBar.INFO, duration=5)
            iface.mapCanvas().refresh()

        else :
            iface.messageBar().pushMessage("Warning :", "WMS invalid : "+n_couche, QgsMessageBar.WARNING, duration=15)

else :
    iface.messageBar().pushMessage("Warning :", "EPSG is unknown ", QgsMessageBar.WARNING, duration=15)
    progress.setText(u'EPSG is unknown ')
--------------------------------------------------------------------------------
/scripts/Cadastre_FR_WMS.py.help:
--------------------------------------------------------------------------------
1 | {"Commune_name": "EN : Field commune name\n==================\nFR : Champ correspondant au nom de commune", "ALG_DESC": "EN : This script adds the WMS cadastral maps of communes from a vector file of communes.\n\nURL : http://inspire.cadastre.gouv.fr/scpc/[INSEE_code].wms?\nVariable : [INSEE_code]\n\nMore information at this address : https://www.cadastre.gouv.fr/scpc/pdf/Guide_WMS_fr.pdf\n==================\nFR : Le script permet d'ajouter le cadastre WMS de plusieurs communes \u00e0 partir d'un fichier vectoriel de communes.\n\nURL : http://inspire.cadastre.gouv.fr/scpc/[INSEE_code].wms?\nVariable : [INSEE_code]\n\nPlus d'informations \u00e0 cette adresse : https://www.cadastre.gouv.fr/scpc/pdf/Guide_WMS_fr.pdf", "null": "", "Code_INSEE": "Choisir le champ correspondant au code INSEE.", "EPSG_code": "EN : Select the coordinate system.\nDefault Code: EPSG: 2154\n\nPossible coordinate systems:\n2154, 3942, 3943, 3944, 3945, 3946, 3947, 3948, 3949, 3950, 32630, 32631, 32632, 3857, 4326, 4258, 32620, 2970, 2972, 2973, 2975, 32622, 32740, 32738, 4471, 32621\n==================\nFR :Choisir le syst\u00e8me de coordonn\u00e9es. 
\nCode par d\u00e9faut : EPSG:2154\n\nSyst\u00e8mes de coordonn\u00e9es possibles :\n2154, 3942, 3943, 3944, 3945, 3946, 3947, 3948, 3949, 3950, 32630, 32631, 32632, 3857, 4326, 4258, 32620, 2970, 2972, 2973, 2975, 32622, 32740, 32738, 4471, 32621", "ALG_CREATOR": "Florian Boret", "Vector_layer": "", "Couche_commune": "Choisir une couche vectorielle listant les communes \u00e0 afficher.", "Champ_code_INSEE": "Choisir le champ correspondant aux codes INSEE.", "INSEE_code": "EN : Field INSEE code\n\nWikipedia : The INSEE code is a numerical indexing code used by the French National Institute for Statistics and Economic Studies (INSEE) to identify various entities, including communes, d\u00e9partements.\n==================\nFR : Champ correspondant au code INSEE", "INSEE code": "EN : Field INSEE code.\n\nWikipedia : The INSEE code is a numerical indexing code used by the French National Institute for Statistics and Economic Studies (INSEE) to identify various entities, including communes, d\u00e9partements.\n\nFR : Champ du code INSEE", "ALG_VERSION": "1.0", "ALG_HELP_CREATOR": "Florian Boret", "Couche_vecteur": "Choisir la couche des communes", "Vector_layer_of_communes": "EN : Select a vector layer of communes.\n\nA commune is an administrative district in France.\n\n==================\nFR : S\u00e9lectionner la couche vectorielle des communes.\n", "Champ_nom_de_commune": "Choisir le champ correspondant aux noms de communes.", "Nom_de_la_commune": "Choisir le champ correspondant au nom de commune.", "Code_EPSG": "Choisir le syst\u00e8me de coordonn\u00e9es. \nCode par d\u00e9faut : EPSG:2154\n\nSyst\u00e8mes de coordonn\u00e9es possibles :\n2154, 3942, 3943, 3944, 3945, 3946, 3947, 3948, 3949, 3950, 32630, 32631, 32632, 3857, 4326, 4258, 32620, 2970, 2972, 2973, 2975, 32622, 32740, 32738, 4471, 32621"}
--------------------------------------------------------------------------------
/scripts/list.txt:
--------------------------------------------------------------------------------
1 | Assign_prj.py,0.1,Assign prj
2 | Batch_replace_in_string_via_regex_dictionary.py,1,Batch replace in string via regex dictionary
3 | Buffer Contour.py,1.0,Buffer Contour
4 | CSV_R-G-B_to_categorized_style.py,1.1,CSV R-G-B to categorized style
5 | CSV_RGB_or_HEX_to_categorized_style.py,1.1,CSV RGB or HEX to categorized style
6 | Cadastre_FR_WMS.py,1.0,Cadastre FR WMS
7 | Contour.py,1,Contour
8 | Create_rasters_from_canvas_for_each_vector_layer_feature_extent.py,1,Create rasters from canvas for each vector layer feature extent
9 | Create_tiling_from_vector_layer.py,1,Create tiling from vector layer
10 | Create_vector_layer_from_Postgis_table.py,1.0,Create vector layer from Postgis table
11 | Create_vector_layer_from_SQL_Query.py,1.0,Create vector layer from SQL Query
12 | Cut_by_field.py,1,Cut by field
13 | Define_1_raster_layer_properties.py,1.0,Define 1 raster layer properties
14 | Define_1_vector_layer_properties.py,1.0,Define 1 vector layer properties
15 | Define_vector_layer_properties.py,1.0,Define vector layer properties
16 | DissolveWithStats.py,0.4,DissolveWithStats
17 | EquivalentNumField.py,1.0,EquivalentNumField
18 | Extract_raster_values_to_CSV.py,1,Extract raster values to CSV
19 | Extract_raster_values_to_shapefile.py,1,Extract raster values to shapefile
20 | Fill_holes.py,1.0,Fill holes
21 | FrequencyStats.py,0.1,FrequencyStats
22 | Generate_Unique_values_renderer.py,1,Generate Unique values renderer
23 | Hex_grid_from_layer_bounds.py,1,Hex grid from layer bounds
24 | Keep_n_biggest_parts.py,1,Keep n biggest parts
25 | Merge_all_lines_in_layer.py,1.0,Merge all lines in layer
26 | Points on crossing lines.py,1,Points on crossing lines
27 | Points on touching lines.py,1,Points on touching lines
28 | Points_from_vector.py,1,Points from vector
29 | Quick_PostgreSQL_Model_Builder_from_description_stored_in_table_sheet.py,1.1,Quick PostgreSQL Model Builder from description stored in table sheet
30 | Read_file_content_into_string.py,1.0,Read file content into string
31 | Remove_parts.py,1,Remove parts
32 | Save_features_filtered_by_expression.py,1.0,Save features filtered by expression
33 | Save_selected_features.py,1,Save selected features
34 | Set_multiple_raster_layers_properties.py,1.0,Set multiple raster layers properties
35 | SilviLiDAR.py,1.0,SilviLiDAR
36 | Split_vector_layer_by_attribute.py,1,Split vector layer by attribute
37 | Square_grid_from_layer_extent.py,1.0,Square grid from layer extent
38 | Summarize.py,1,Summarize
39 | Unique_values_count.py,1,Unique values count
40 | classification_by_decision_tree.py,1,classification by decision tree
41 | distance_lines_between_points.py,1,distance lines between points
42 | ellipsoidal_area.py,1.0,ellipsoidal area
43 | make_landsat8_footprints.py,1,make landsat8 footprints
44 | predominant_category.py,1,predominant category
45 | pygraticule.py,1.1,pygraticule
46 | realcentroid_algorithm.py,1.0,realcentroid algorithm
47 | spatial_cross_join_attributes.py,1.0,spatial cross join attributes
--------------------------------------------------------------------------------
/scripts/Points on touching lines.py:
--------------------------------------------------------------------------------
1 | ##Lines=vector
2 | ##Point_grouping_buffer=number 10
3 | ##Keep_lines_end=boolean false
4 | ##Results=output vector
5 |
6 | from qgis.core import *
7 | from PyQt4.QtCore import *
8 | from processing.core.VectorWriter import VectorWriter
9 |
10 |
def buffRect(point, b):
    """Return the square search rectangle of half-width ``b`` centred on ``point``."""
    cx = point.x()
    cy = point.y()
    return QgsRectangle(cx - b, cy - b, cx + b, cy + b)
15 |
# Find the points where lines of the input layer touch each other, then
# de-duplicate points closer than the grouping buffer and save the result.
buff = Point_grouping_buffer
sqbf = buff * buff              # squared buffer, compared against sqrDist() below
cutLayer = processing.getObject(Lines)
cutPrder = cutLayer.dataProvider()
n = cutLayer.featureCount()
l = 0

# build spatial index of lines

index = QgsSpatialIndex()
geom_ix = {}                    # feature id -> line geometry stored as WKB
progress.setText("Index lines...")

for feat in processing.features(cutLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    index.insertFeature(feat)
    # keep a WKB copy so the geometry can be rebuilt later without
    # going back to the data provider
    geom_ix[feat.id()] = feat.geometry().asWkb()



# find points on crossing lines
progress.setText("Find touching points...")

l = 0
i = 0                           # running id assigned to candidate points
ptindex = QgsSpatialIndex()     # spatial index over the candidate points
pt_ix = {}                      # candidate point id -> QgsPoint
sgeom = QgsGeometry()
fgeom = QgsGeometry()
resfeat = QgsFeature()

for feat in processing.features(cutLayer):
    progress.setPercentage(int(100*l/n))
    l+=1

    # neighbour candidates from the bounding-box index
    near = index.intersects(feat.geometry().boundingBox())

    for f in [x for x in near if x != feat.id()]: # exclude self

        fgeom = feat.geometry()
        sgeom.fromWkb(geom_ix[f])

        if fgeom.touches(sgeom):
            # the lines touch: their intersection yields the contact point(s)
            crosspts = feat.geometry().intersection(sgeom).asGeometryCollection()

            for pt in crosspts:

                refpt = pt.asPoint()

                # first and last vertex of both lines
                endpts = [fgeom.vertexAt(0), sgeom.vertexAt(0),
                          fgeom.vertexAt(len(fgeom.asPolyline())-1),
                          sgeom.vertexAt(len(sgeom.asPolyline())-1)]
                dist = sorted([refpt.sqrDist(x) for x in endpts])

                # dist[1] is the distance to the *second* closest line end;
                # if it exceeds the squared buffer the contact is not a
                # shared line ending, so it is always kept.  Shared endings
                # are kept only when Keep_lines_end is set.
                if Keep_lines_end or dist[1] > sqbf: # index point
                    i += 1
                    resfeat.setGeometry(pt)
                    resfeat.setFeatureId(i)
                    ptindex.insertFeature(resfeat)
                    pt_ix[i] = pt.asPoint()


feat = QgsFeature()
fields = [QgsField("nodeid", QVariant.Int)]
writer = VectorWriter(Results, None, fields, QGis.WKBPoint, cutPrder.crs())


# only save unique points
progress.setText("Save unique points...")
n = len(pt_ix)
fgeom = QgsGeometry()


while len(pt_ix) != 0:
    progress.setPercentage(int(100*(n-len(pt_ix))/n))

    # take an arbitrary remaining point (Python 2: dict.keys() is a list)
    i = pt_ix.keys()[0]

    # write point

    attrs = [i]
    feat.setGeometry(fgeom.fromPoint(pt_ix[i]))
    feat.setAttributes(attrs)
    writer.addFeature(feat)

    # delete close points
    # every candidate within the square buffer around the written point is
    # treated as a duplicate and removed (including the point itself)
    near = ptindex.intersects(buffRect(pt_ix[i], buff))

    for pt in near:
        feat.setFeatureId(pt)
        feat.setGeometry(fgeom.fromPoint(pt_ix[pt]))
        deleted = ptindex.deleteFeature(feat)
        del pt_ix[pt]

del writer
--------------------------------------------------------------------------------
/scripts/Create_vector_layer_from_SQL_Query.py:
--------------------------------------------------------------------------------
##Database=group
##Create vector layer from SQL Query=name
##Database_type=selection postgis;spatialite
##Connection_name=string
##Query=longstring
##Geometry_field_name=string geom
##Unique_id_field_name=string id
##Avoid_select_by_id=boolean True
##output=output vector

from qgis.core import *
from db_manager.db_plugins.plugin import DBPlugin, Schema, Table, BaseError
from db_manager.db_plugins import createDbPlugin
from db_manager.dlg_db_error import DlgDbError
from processing.tools.vector import VectorWriter

# Run an SQL query through a DB-Manager connection (PostGIS or SpatiaLite)
# and export the resulting layer via the Processing vector writer.

connectionName = unicode(Connection_name)
dbTypeMap = { 0: 'postgis', 1: 'spatialite' }
dbType = dbTypeMap[Database_type]

progress.setText('%s' % dbType)

# Get database connection via DbManager classes
connection = None
if connectionName:
    dbpluginclass = createDbPlugin( dbType, connectionName )
    if dbpluginclass:
        try:
            connection = dbpluginclass.connect()
        except BaseError as e:
            progress.setText(e.msg)
else:
    progress.setText('## Please give a database connection name.')

# Run the Query and create vector layer
layer = None
if connection:
    db = dbpluginclass.database()
    if db:

        # get a layer name not already present in the registry
        names = []
        for layer in QgsMapLayerRegistry.instance().mapLayers().values():
            names.append( layer.name() )

        baseLayerName = "vlayer"
        newLayerName = baseLayerName
        i = 0
        while newLayerName in names:
            i+=1
            # BUGFIX: was u"%s_%d" % (layerName, i) — `layerName` is never
            # defined, so this raised NameError whenever a layer called
            # "vlayer" already existed.  Use the base name instead.
            newLayerName = u"%s_%d" % (baseLayerName, i)

        # Create layer from query result
        layer = db.toSqlLayer(
            Query,
            Geometry_field_name,
            Unique_id_field_name,
            newLayerName,
            QgsMapLayer.VectorLayer,
            Avoid_select_by_id
        )
        if layer.isValid():

            # Create writer matching the query layer's schema and CRS
            writer = VectorWriter(
                output,
                None,
                layer.dataProvider().fields(),
                layer.dataProvider().geometryType(),
                layer.crs()
            )

            # Export features
            features = layer.getFeatures()
            for feat in features:
                writer.addFeature(feat)

            del writer

            # Log number of features retrieves
            progress.setText('|| The query returned %s features' % layer.featureCount())

        else:
            progress.setText('## The layer is invalid - Please check your query')

    else:
        progress.setText('## Database cannot be accessed')

else:
    progress.setText('## Cannot connect to the specified database connection name: "%s".' % connectionName)
--------------------------------------------------------------------------------
/scripts/ellipsoidal_area.py:
--------------------------------------------------------------------------------
##Ellipsoidal Area=name
##Utils=group
##input=vector polygon
##ellipsoid=string WGS84
##new_field=string Area
##units=selection sq_km;sq_m;sq_miles;sq_ft;sq_nm;sq_degrees
##output=output vector

from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.tools.vector import VectorWriter
from PyQt4.QtCore import *
from qgis.core import *

# Compute the ellipsoidal area of every polygon of a geographic-CRS layer
# and append it as a new attribute, converted to the selected unit.

measure_units_dict = {0: 'sq_km', 1: 'sq_m', 2: 'sq_miles', 3: 'sq_ft',
                      4: 'sq_nm', 5:'sq_degrees'}
units_selection = measure_units_dict[units]
input_layer = processing.getObject(input)

# Ellipsoidal measurement is only meaningful on lat/lon layers.
if not input_layer.crs().geographicFlag():
    raise GeoAlgorithmExecutionException(
        'Your layer has a Projected CRS. '
        'This script works only on layers with Geographic CRS.')

fields = QgsFields()
for field in input_layer.pendingFields():
    # refuse to silently shadow an existing column
    if field.name().lower() == new_field.lower():
        raise GeoAlgorithmExecutionException(
            'The input layer already has a field named %s.'
            'Please choose a different name for the Area field.' % new_field)

    fields.append(field)
fields.append(QgsField(new_field, QVariant.Double))

writer = VectorWriter(output, None, fields,
                      QGis.WKBMultiPolygon, input_layer.crs())
# Initialize QgsDistanceArea object
area = QgsDistanceArea()
area.setEllipsoid(ellipsoid)
area.setEllipsoidalMode(True)
area.computeAreaInit()

out_f = QgsFeature()

# Get feature count for progress bar
features = processing.features(input_layer)
num_features = len(features)

for i, feat in enumerate(features):
    progress.setPercentage(int(100 *i / num_features))
    geom = feat.geometry()
    polygon_area = 0
    if geom.isMultipart():
        polygons = geom.asMultiPolygon()
        for polygon in polygons:
            # polygon[0] is the exterior ring; interior rings (holes)
            # are not subtracted
            polygon_area += area.measurePolygon(polygon[0])
    else:
        polygon = geom.asPolygon()
        polygon_area = area.measurePolygon(polygon[0])

    # calculated area is in sq. metres
    if units_selection == 'sq_km':
        final_area = polygon_area / 1e6
    elif units_selection == 'sq_ft':
        final_area = area.convertMeasurement(
            polygon_area, QGis.Meters, QGis.Feet, True)[0]
    elif units_selection == 'sq_miles':
        # no direct square-miles unit: go through sq feet (5280 ft per mile)
        final_area = area.convertMeasurement(
            polygon_area, QGis.Meters, QGis.Feet, True)[0] / (5280.0 * 5280.0)
    elif units_selection == 'sq_nm':
        final_area = area.convertMeasurement(
            polygon_area, QGis.Meters, QGis.NauticalMiles, True)[0]
    elif units_selection == 'sq_degrees':
        final_area = area.convertMeasurement(
            polygon_area, QGis.Meters, QGis.Degrees, True)[0]
    else:
        # 'sq_m': measurement is already in square metres
        final_area = polygon_area

    attrs = feat.attributes()
    attrs.append(final_area)
    out_f.setGeometry(geom)
    out_f.setAttributes(attrs)
    writer.addFeature(out_f)

progress.setPercentage(100)
del writer
--------------------------------------------------------------------------------
/scripts/Quick_PostgreSQL_Model_Builder_from_description_stored_in_table_sheet.py.help:
--------------------------------------------------------------------------------
1 | {"Field_with_table_comment": "Some comment for the table (won't be used if empty)", "ALG_DESC": "This script is intended to provide a very quick way to generate all the needed SQL code to create some tables in a PostgreSQL database. You just need to store some information about tables and columns in 2 simple data sheet ( such as a CSV file or DBF file) written with LibreOffice (or MS Excel for example):\n\n* A table sheet with information on tables to be created : name, schema, comment\n\n* A table sheet with information on columns to be added to the tables: table name, column name, type, not null status, primary key status, constraint (only one), comment and index status.\n\nBasically, you just need to open LibreOffice (or other software which can write in sheets) , and; \n\n* create 2 files with the needed columns,\n* fill each table sheet with the needed information,\n* export them to CSV or DBF,\n* open them in QGIS,\n* run the script.\n\nThe script will create the SQL content and:\n* save it to the destination file chosen by the user\n* output the SQL string to allow the use in models (for example with the PostGis run query tool)\n\nIt has been tested with PostgreSQL 9.3 and above.", "Layer_containing_columns_description": "QGIS layer containing information on columns to be added to database tables.", "ALG_CREATOR": "Micha\u00ebl DOUCHIN (3liz)", "Field_with_column_table_name": "The field which contains the name of the table for the column to create.", "sql": "A string output, which can be used when using this script inside a model, for example combined with the \"PostGIS Execute SQL\" alg", "Field_with_column_type": "Use only types allowed in PostgreSQL, for example :\n\n* integer\n* float\n* text\n* boolean\n* date\n* timestamp\netc.", "Field_with_column_not_null_status": "Use the string value '1' to set the column constraint NOT NULL", "Field_with_column_name": "Field containing the column name", "Field_with_column_index_status": "Use the string value 
'1' to set an automatic index on the column. This will need some improvement in a future version of the script.", "Layer_containing_tables_description": "The QGIS layer containing the definition of the tables. This layer must contain 4 columns, one for each information:\n\n* name of the table\n* schema of the table (won't be used if empty)\n* comment of the table (won't be used if empty)\n* primary key(s) for the table", "Field_with_table_name": "Table name", "ALG_VERSION": "1.1", "ALG_HELP_CREATOR": "Micha\u00ebl DOUCHIN (3liz)", "Field_with_table_schema": "Schema for the table (won't be used if empty)", "Field_with_column_comment": "You can add any comment. Single quotes are automatically escaped by the script.", "Field_with_table_primary_key": "List of primary key colmun separated by comma. Examples:\n\n* single column key: city_id\n* Double primary key: city_id, bus_id", "Output_SQL_file": "Output file created by the script as a result.\n\nPlease check this file before using it, and modify it to fill your need !", "Field_with_column_constraint": "Add description for a constraint (only one possible), with the name of the constraint and its definition. For example:\n\n* foreign key : use the following type of string\n\nmytable_mycol_fk FOREIGN KEY (mycol) REFERENCES myforeigntable (myfkey) ON DELETE CASCADE;\n\n* constraint on size :\n\nmycol_size_check CHECK (char_length(mycol) = 5);"}
--------------------------------------------------------------------------------
/scripts/Create_rasters_from_canvas_for_each_vector_layer_feature_extent.py:
--------------------------------------------------------------------------------
##Raster=group
##Create rasters from canvas for each vector feature extent=name
##vector_layer=vector
##scale=number 250000.0
##dpi=number 96
##image_width_mm=number 0
##image_height_mm=number 0
##output_directory=folder
##outputDir=output string

from qgis.core import *
from qgis.gui import *
from qgis.utils import iface
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import os

# Render the current map canvas to one georeferenced PNG (plus .pgw world
# file) per feature extent of the chosen vector layer.

# Loop though grid layerfeatures
layer = processing.getObject(vector_layer)
provider = layer.dataProvider()

# Get map renderer
mapRenderer = iface.mapCanvas().mapRenderer()

# Create a new composition
c = QgsComposition(mapRenderer)
c.setPrintResolution(dpi)

# Add a composer map object
x, y = 0, 0
composerMap = QgsComposerMap(c, x, y, image_width_mm, image_height_mm)
c.addComposerMap(composerMap)
composerMap.setBackgroundEnabled(False)

# World file
c.setWorldFileMap(composerMap)
c.setGenerateWorldFile( True )

# Get vector layer features
feats = processing.features(layer)
nFeat = len(feats)
i = 0
progress.setText(u'%s tiles to be generated' % nFeat)

# Iterate over the features
for feat in feats:
    # Log progression
    progress.setPercentage(int(100 * i / nFeat))
    i+=1

    # Get the feature bouding box
    geom = feat.geometry()
    rect = geom.boundingBox()

    # Recalculate paper width and height if not given
    # NOTE(review): once set for the first feature these stay non-zero, so
    # all later tiles reuse the first feature's paper size — confirm this
    # is intended when feature extents differ.
    if not image_width_mm and scale > 0:
        image_width_mm = rect.width() * 1000 / scale
    if not image_height_mm and scale > 0:
        image_height_mm = rect.height() * 1000 / scale

    # Calculate image size in pixel
    inch2mm = 25.4
    image_width_pixel = int(image_width_mm * dpi / inch2mm)
    image_height_pixel = int(image_height_mm * dpi / inch2mm)
    progress.setText(u'Feature %s - Image width : %s * %s mm / %s * %s pixels' % (
        i,
        image_width_mm,
        image_height_mm,
        image_width_pixel,
        image_height_pixel
        )
    )

    # Set paper and composerMap width and height
    c.setPaperSize(image_width_mm, image_height_mm)
    composerMap.setItemPosition(x, y, image_width_mm, image_height_mm)

    # Set the map extent and scale
    composerMap.setNewExtent(rect)
    if scale > 0:
        composerMap.setNewScale(scale)

    # Image destination path
    outputPath= "tile_%s_%s" % (scale, i)
    outputImagePath = os.path.join(output_directory, outputPath + '.png')

    # Generate image from composition
    myImage = c.printPageAsRaster(0)
    myImage.save(outputImagePath)

    # Generate World file
    # The six affine parameters are written in world-file line order using
    # indices 0,1,3,4,2,5 of the tuple returned by QGIS.
    wf = c.computeWorldFileParameters()
    outputWorldPath = os.path.join(output_directory, outputPath + '.pgw')
    with open(outputWorldPath, 'w') as f:
        f.write('%s\n' % wf[0])
        f.write('%s\n' % wf[1])
        f.write('%s\n' % wf[3])
        f.write('%s\n' % wf[4])
        f.write('%s\n' % wf[2])
        f.write('%s\n' % wf[5])

# export chosen output directory as a output variable
outputDir = output_directory
104 |
--------------------------------------------------------------------------------
/rscripts/Ordinary Kriging.rsx:
--------------------------------------------------------------------------------
##[R-Geostatistics]=group
##showplots
##layer=vector
##field=field layer
##Estimate_range_and_psill_initial_values_from_sample_variogram=boolean True
##nugget=number 0
##model=selection Exp;Sph;Gau;Mat
##range=number 0
##psill=number 0
##Local_kriging=boolean False
##Number_of_nearest_observations=number 25
##Show_Sum_of_Square_Errors=boolean False
##Extent=selection Convex Hull; Layer Extent
##Resolution=number 0
##kriging_variance= output raster
##kriging_prediction= output raster


# Ordinary kriging of a point-layer attribute using gstat.
library('gstat')
library('sp')
# Map the QGIS selection index onto the gstat variogram model name
Models<-c("Exp","Sph","Gau","Mat")
model2<-Models[model+1]

# Build a regular prediction grid clipped to the convex hull of the points
create_new_data_ch <- function (layer)
{
  convex_hull = chull(coordinates(layer)[, 1], coordinates(layer)[,2])
  convex_hull = c(convex_hull, convex_hull[1])   # close the ring
  d = Polygon(layer[convex_hull, ])
  # unprojected layer or Resolution 0: fall back to ~5000 regular cells
  if(!is.projected(layer) | Resolution== 0){new_data = spsample(d, 5000,
    type = "regular")}
  if(is.projected(layer) & Resolution!= 0){
    new_data = spsample(d, n= 1, cellsize=c(Resolution,Resolution),
      type="regular")}
  gridded(new_data) = TRUE
  attr(new_data, "proj4string") <-layer@proj4string
  return(new_data)
}

# Build a regular prediction grid covering the full layer bounding box
create_new_data_ext <- function (layer){
  bottomright <- c(layer@bbox[1], layer@bbox[2])
  topleft <- c(layer@bbox[3], layer@bbox[4])
  d <- SpatialPolygons(
    list(Polygons(list(Polygon(coords = matrix(
      c(topleft[1],bottomright[1], bottomright[1],topleft[1],topleft[1],
        topleft[2], topleft[2], bottomright[2],
        bottomright[2],topleft[2]), ncol=2, nrow= 5))), ID=1)))
  if(!is.projected(layer) | Resolution== 0){new_data = spsample(d, 5000,
    type = "regular")}
  if(is.projected(layer) & Resolution != 0){
    new_data = spsample(d, n= 1, cellsize=c(Resolution,Resolution),
      type="regular")}
  gridded(new_data) = TRUE
  attr(new_data, "proj4string") <-layer@proj4string
  return(new_data)
}

if(Extent==0){mask<-create_new_data_ch(layer)}
if(Extent==1){mask<-create_new_data_ext(layer)}

# Rename the chosen attribute to a fixed name so the formulas below work
field <- make.names(field)
names(layer)[names(layer)==field]="field"

layer$field <- as.numeric(as.character(layer$field))
str(layer)
layer <- remove.duplicates(layer)          # kriging requires unique locations
layer <- layer[!is.na(layer$field),]

g = gstat(id = field, formula = field~1, data = layer)
vg = variogram(g)

# NA lets fit.variogram estimate range/psill from the sample variogram
if(Estimate_range_and_psill_initial_values_from_sample_variogram){range=NA}
if(Estimate_range_and_psill_initial_values_from_sample_variogram){psill=NA}

vgm = vgm(nugget=nugget, psill=psill, range=range, model=model2)
vgm = fit.variogram(vg, vgm)
>vgm
plot(vg, vgm, plot.numbers = TRUE)
# Local kriging restricts each prediction to the nearest observations
if(Local_kriging==FALSE){prediction = krige(field~1, layer, newdata = mask, vgm)}
if(Local_kriging==TRUE){prediction = krige(field~1, layer, newdata = mask, vgm, nmax=Number_of_nearest_observations)}
>if(Show_Sum_of_Square_Errors==TRUE){paste("SSE:", attr(vgm, "SSErr"))}
#>if(!is.projected(layer)){warning(paste0("'layer' isn't projected.\n", "Resolution was not used. Interpolation was done over 5000 cells"))}
#>if(is.projected(layer) & Resolution == 0){warning("Resolution was set to 0. Final resolution estimated from data")}

kriging_prediction = raster(prediction)
kriging_variance = raster(prediction["var1.var"])
86 |
--------------------------------------------------------------------------------
/scripts/Extract_raster_values_to_CSV.py:
--------------------------------------------------------------------------------
##Raster=group
##Input_raster=raster
##Input_vector=vector
##Transform_vector_to_raster_CRS=boolean
##Output_table=output table

import os
from osgeo import gdal, ogr, osr
from processing.tools.vector import TableWriter
from processing.core.GeoAlgorithmExecutionException import \
    GeoAlgorithmExecutionException
from processing.tools.raster import *

# Sample every band of a raster at each point feature of a vector layer and
# write a CSV: one column per attribute field plus one column per band.

raster = gdal.Open(Input_raster)

rasterBaseName = os.path.splitext(os.path.basename(Input_raster))[0]

bandCount = raster.RasterCount
rasterXSize = raster.RasterXSize
rasterYSize = raster.RasterYSize
geoTransform = raster.GetGeoTransform()
rasterCRS = osr.SpatialReference()
rasterCRS.ImportFromWkt(raster.GetProjectionRef())

vector = ogr.Open(Input_vector, False)
layer = vector.GetLayer(0)
featureCount = layer.GetFeatureCount()
if featureCount == 0:
    raise GeoAlgorithmExecutionException(
        'There are no features in input vector.')

vectorCRS = layer.GetSpatialRef()

# One list per attribute field; first entry is the column header
columns = []
featureDefn = layer.GetLayerDefn()
for i in xrange(featureDefn.GetFieldCount()):
    fieldDefn = featureDefn.GetFieldDefn(i)
    columns.append([fieldDefn.GetNameRef()])

layer.ResetReading()
feature = layer.GetNextFeature()
while feature is not None:
    for i in xrange(featureDefn.GetFieldCount()):
        fieldDefn = featureDefn.GetFieldDefn(i)
        if fieldDefn.GetType() == ogr.OFTInteger:
            columns[i].append(feature.GetFieldAsInteger(i))
        elif fieldDefn.GetType() == ogr.OFTReal:
            columns[i].append(feature.GetFieldAsDouble(i))
        else:
            columns[i].append(feature.GetFieldAsString(i))
    feature = layer.GetNextFeature()

current = 0
total = bandCount + featureCount * bandCount

if Transform_vector_to_raster_CRS:
    coordTransform = osr.CoordinateTransformation(vectorCRS, rasterCRS)
    if coordTransform is None:
        raise GeoAlgorithmExecutionException(
            'Error while creating coordinate transformation.')

# Band columns are named "<first 8 chars of raster name>_<band number>"
columnName = rasterBaseName[:8]
for i in xrange(bandCount):
    current += 1
    # BUGFIX: was int(current * total), which exceeds 100 almost at once;
    # the intended value is the fraction done scaled to a percentage.
    progress.setPercentage(int(100 * current / total))

    rasterBand = raster.GetRasterBand(i + 1)
    try:
        data = rasterBand.ReadAsArray()
    except:
        raise GeoAlgorithmExecutionException(
            'Error reading raster data. File might be too big.')
    layer.ResetReading()
    feature = layer.GetNextFeature()
    col = []
    col.append(columnName + '_' + str(i + 1))
    while feature is not None:
        current += 1
        progress.setPercentage(int(100 * current / total))

        geometry = feature.GetGeometryRef()
        x = geometry.GetX()
        y = geometry.GetY()
        if Transform_vector_to_raster_CRS:
            pnt = coordTransform.TransformPoint(x, y, 0)
            x = pnt[0]
            y = pnt[1]
        (rX, rY) = mapToPixel(x, y, geoTransform)
        # BUGFIX: also skip points left of / above the raster; a negative
        # index would silently wrap around and sample the wrong cell.
        if rX < 0 or rY < 0 or rX >= rasterXSize or rY >= rasterYSize:
            feature = layer.GetNextFeature()
            continue
        value = data[rY, rX]
        col.append(value)

        feature = layer.GetNextFeature()

    rasterBand = None
    columns.append(col)

raster = None
vector.Destroy()

# Transpose the column lists into CSV rows
writer = TableWriter(Output_table, 'utf-8', [])
row = []
for i in xrange(len(columns[0])):
    for col in columns:
        row.append(col[i])
    writer.addRecord(row)
    row[:] = []
110 |
--------------------------------------------------------------------------------
/rscripts/Douglas-Peucker.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer=vector
3 | ##distance=number 100
4 | ##Output1= output vector
5 | ##Output2= output vector
6 | library(rgdal)
7 | library('maptools')
8 | library(maptools)
9 | library(rgeos)
10 | library(geosphere)
11 | library(stats)
# Return c(index, dmax): the interior vertex farthest from the chord
# joining the first and last points, using great-circle (Haversine)
# point-to-line distance in metres.
distance_max = function(PointList){
  dmax = 0
  index = 0
  n <- dim(PointList)[1]
  # only interior vertices (2 .. n-1) can be split points
  for (i in (2 :(n - 1))){
    p=PointList[i,]
    line=rbind(PointList[1,], PointList[n,])
    d = dist2Line(p, line, distfun=distHaversine)
    if (d[1] > dmax){
      index = i
      dmax = d[1]
    }
  }
  result<-c(index,dmax)
  return (result)
}
28 |
# Douglas-Peucker line simplification (iterative variant): repeatedly split
# each piece at its farthest interior vertex until every piece deviates from
# its chord by less than `epsilon` (metres, Haversine distance).
# Returns the input matrix unchanged when it is already within tolerance,
# otherwise a list of simplified two-point segment matrices.
DouglasPeucker= function(PointList, epsilon){
  n <- dim(PointList)[1]
  d<-distance_max(PointList)
  if (d[2] < epsilon){
    Result<-PointList
  }
  else if (d[2] >= epsilon){
    X<- PointList[1:d[1],]
    Y<- PointList[d[1]:n,]
    ResultList<-list(X,Y)
    k=1
    # NOTE(review): the source dump is truncated here ("while (k= epsilon){"),
    # consistent with the span "<=length(ResultList)){ ... d[2] >" having been
    # stripped as an HTML tag.  Reconstructed loop: walk the pieces, splitting
    # any piece whose max deviation still exceeds epsilon — confirm against
    # the original repository.
    while (k<=length(ResultList)){
      d<-distance_max(ResultList[[k]])
      if (d[2] >= epsilon){
        m<-dim(ResultList[[k]])[1]
        X<- ResultList[[k]][1:d[1],]
        Y<- ResultList[[k]][d[1]:m,]
        ResultList[[k]]<-NULL
        ResultList<-c(ResultList,list(X,Y))
        k=k       # same index now holds the next unprocessed piece
      } else {
        # piece accepted: keep only its two end points
        m<-dim(ResultList[[k]])[1]
        ResultList[[k]]<-rbind(ResultList[[k]][1,],ResultList[[k]][m,])
        k=k+1
      }
    }
    Result<-c(ResultList)
  }
  return (Result)
}
# ---- Extract the shared borders ("fronts") between every pair of polygons ----
p<-length(Layer@polygons)
FRONT<-c()
front<-c()
for (w in (1:(p-1))){
  Layer1<-SpatialPolygonsDataFrame(SpatialPolygons(list(Layer@polygons[[w]])),
    data=as.data.frame(c(1:length(SpatialPolygons(list(Layer@polygons[[w]]))))),match.ID = F)
  for(W in((w+1):p)){
    Layer0<-SpatialPolygonsDataFrame(SpatialPolygons(list(Layer@polygons[[W]])),
      data=as.data.frame(c(1:length(SpatialPolygons(list(Layer@polygons[[W]]))))),match.ID = F)
    # common boundary of polygons w and W (NULL when they do not touch)
    A<-gIntersection(Layer1, Layer0, byid=FALSE, id=NULL)
    if (class(A)!='NULL'){
      A<-gLineMerge(A, byid=FALSE, id = NULL)
      front<-c(front,A)
    }
  }
  if (class(front)!='NULL'){
    for (t in(1:length(front))){
      FRONT<-c(FRONT, Line(front[[t]]@lines[[1]]@Lines[[1]]@coords))
    }
  }
  front<-c()
}
FRONT<-Lines(FRONT,ID='lin')
FRONT<-SpatialLines(list(FRONT))
Output1=SpatialLinesDataFrame(FRONT, data=as.data.frame(c(1:length(FRONT))), match.ID = F)

# ---- Simplify each shared border with Douglas-Peucker ----
x<-length(Output1@lines[[1]]@Lines)
ligne<-c()
i=0
for (z in (1:x)){
  Resultats<-c()
  points<-Output1@lines[[1]]@Lines[[z]]@coords
  result<-DouglasPeucker(PointList=points, epsilon=distance)
  if (class(result)=='matrix'){
    # a plain matrix means the line was already within tolerance: skip
    Resultats<-Resultats
  } else{
    Results<-rep(list(0),length(result))
    Result<-rep(list(0),length(result))
    for (k in (1:(length(result)))){
      Result[[k]]<-Line(result[[k]])
      i=i+1
      Results[[k]]<-Lines(list(Result[[k]]),ID=paste("lignes",i))
    }
    Resultats<-c(Resultats,Results)
  }
  ligne<-c(ligne,Resultats)
}
Ligne<-SpatialLines(ligne)
Output1=SpatialLinesDataFrame(Ligne, data=as.data.frame(c(1:length(Ligne))), match.ID = F)

# ---- Simplify the remaining (non-shared) outlines ----
i=0
a<-c()
# symmetric difference: the parts of the polygon outlines that are not on
# a shared border
X<-gSymdifference(Layer, Ligne, byid=FALSE, id=NULL)

for (t in (1:length(X@polygons[[1]]@Polygons))){
  a<-c(a,Lines(Line(X@polygons[[1]]@Polygons[[t]]@coords), ID=paste('lin',i)))
  i=i+1
}
a<-SpatialLines(a)
a<-SpatialLinesDataFrame(a, data=as.data.frame(c(1:length(a))), match.ID = F)
ligne2<-c()
x<-length(a@lines)
for (z in (1:x)){
  Resultats<-c()
  points<-a@lines[[z]]@Lines[[1]]@coords
  result<-DouglasPeucker(PointList=points, epsilon=distance)
  if (class(result)=='matrix'){
    Resultats<-Resultats
  } else{
    Results<-rep(list(0),length(result))
    Result<-rep(list(0),length(result))
    for (k in (1:(length(result)))){
      Result[[k]]<-Line(result[[k]])
      i=i+1
      Results[[k]]<-Lines(list(Result[[k]]),ID=paste("lignes",i))
    }
    Resultats<-c(Resultats,Results)
  }
  ligne2<-c(ligne2,Resultats)
}
Ligne_2<-SpatialLines(ligne2)
Output2=SpatialLinesDataFrame(Ligne_2, data=as.data.frame(c(1:length(Ligne_2))), match.ID = F)
--------------------------------------------------------------------------------
/rscripts/Kernel_density_estimation.rsx:
--------------------------------------------------------------------------------
1 | ##Basic statistics=group
2 | ##Layer1=vector
3 | ##Layer2=vector
4 | ##methode=number 1
5 | ##Output= output raster
6 | library(rgdal)
7 | library(surveillance)
8 | library(maptools)
9 | library(ggplot2)
10 | library(plyr)
11 | library(ellipse)
12 | library(fields)
13 | library(ks)
14 | library(maps)
15 | library(rgeos)
16 | library(snow)
17 | library(sp)
18 | library(ggmap)
19 | library(reshape2)
# Sample n points on the circle of the given radius centred at `centre`.
# Returns an n x 2 matrix with columns "x" and "y"; since theta spans the
# closed interval [0, 2*pi], the first and last rows coincide.
sCircle <- function(n = 100, centre = c(0, 0), radius){
theta <- seq(0, 2 * pi, length = n)
pts <- radius * cbind(cos(theta), sin(theta))
colnames(pts) <- c("x", "y")
pts[, "x"] <- pts[, "x"] + centre[1]
pts[, "y"] <- pts[, "y"] + centre[2]
pts
}
# Boundary-correction weight for the kernel centred at point `x`: the
# fraction of a disc of radius 1.759*h around x that lies inside `polygon`
# (a gpc.poly).  The 1.759*h radius presumably approximates the effective
# support of the Gaussian kernel with bandwidth h -- TODO confirm the
# constant against the method's reference publication.
sWeights <- function(x, h, polygon) {
leCercle <- sCircle(centre = x, radius = 1.759*h)
# Drop the duplicated closing vertex before converting to a gpc polygon.
POLcercle <- as(leCercle[-nrow(leCercle),], "gpc.poly")
return(area.poly(intersect(polygon, POLcercle)) / area.poly(POLcercle))
}
# Kernel density estimate of the points in U (n x 2 coordinates), corrected
# for edge effects at the border of `polygon`: each observation is weighted
# by the inverse of the fraction of its kernel support inside the polygon
# (see sWeights).  Returns a list with the grid axes X/Y, the estimate Z,
# ZNA (Z with cells outside the polygon set to NA), the bandwidth matrix H
# and the per-observation weights W.
# NOTE(review): `parallel` and `n_clusters` are accepted but never used in
# this body -- the weight loop below is always sequential.
sKDE <- function(U, polygon, optimal = TRUE, h = .1, parallel = FALSE, n_clusters = 4){
if(!class(polygon) == "gpc.poly") polygon <- as(polygon, "gpc.poly")
if(class(U) == "data.frame") U <- as.matrix(U)
# Keep only rows whose x coordinate is not NA.
IND <- which(is.na(U[, 1]) == FALSE)
U <- U[IND,]
n <- nrow(U)
if(optimal){
# Plug-in bandwidth (ks::Hpi), symmetrised into an isotropic diagonal matrix.
H <- Hpi(U, binned = FALSE)
H <- matrix(c(sqrt(H[1, 1] * H[2, 2]), 0, 0, sqrt(H[1, 1] * H[2, 2])), 2, 2)
}
if(!optimal){
H <- matrix(c(h, 0, 0, h), 2, 2)
}
# Edge-correction weight of observation i: fraction of its kernel disc
# that falls inside the polygon.
poidsU <- function(i, U, h, POL){
x <- as.numeric(U[i,])
sWeights(x, h, POL)
}
OMEGA <- NULL
for(i in 1:n){
OMEGA <- c(OMEGA, poidsU(i, U, h = sqrt(H[1, 1]), POL = polygon))
}
# Weighted KDE evaluated over the polygon's bounding box.
fhat <- kde(U, H, w = 1/OMEGA,
xmin = c(min(get.bbox(polygon)$x), min(get.bbox(polygon)$y)),
xmax = c(max(get.bbox(polygon)$x), max(get.bbox(polygon)$y)))
# Renormalise so the weighted estimate keeps unit total mass.
fhat$estimate <- fhat$estimate * sum(1/OMEGA) / n
vx <- unlist(fhat$eval.points[1])
vy <- unlist(fhat$eval.points[2])
# All grid (x, y) pairs, y varying fastest.
VX <- cbind(rep(vx, each = length(vy)))
VY <- cbind(rep(vy, length(vx)))
VXY <- cbind(VX, VY)
# In/out mask of the grid, shaped length(vy) x length(vx); transposed below
# to match the orientation of fhat$estimate.
Ind <- matrix(inside.gpc.poly(x = VX, y = VY, polyregion = polygon), length(vy), length(vx))
f0 <- fhat
f0$estimate[t(Ind) == 0] <- NA
list(
X = fhat$eval.points[[1]],
Y = fhat$eval.points[[2]],
Z = fhat$estimate,
ZNA = f0$estimate,
H = fhat$H,
W = fhat$w)
}
# Plain (uncorrected) kernel density estimate of U over the bounding box of
# `polygon`; same return shape as sKDE but without edge-effect weighting.
sKDE_without_c = function(U, polygon, optimal = TRUE, h = .1){
polygon <- as(polygon, "gpc.poly")
# Accept a data.frame, like sKDE does (consistency fix).
if(class(U) == "data.frame") U <- as.matrix(U)
# Keep only rows whose x coordinate is not NA.
IND <- which(is.na(U[,1]) == FALSE)
U <- U[IND,]
n <- nrow(U)
if(optimal){
# Plug-in bandwidth (ks::Hpi), symmetrised into an isotropic diagonal matrix.
H <- Hpi(U,binned=FALSE)
H <- matrix(c(sqrt(H[1, 1] * H[2, 2]), 0, 0, sqrt(H[1, 1] * H[2, 2])), 2, 2)
}
if(!optimal){
H <- matrix(c(h, 0, 0, h), 2, 2)
}
fhat <- kde(U, H,
xmin = c(min(get.bbox(polygon)$x), min(get.bbox(polygon)$y)),
xmax = c(max(get.bbox(polygon)$x), max(get.bbox(polygon)$y)))

vx <- unlist(fhat$eval.points[1])
vy <- unlist(fhat$eval.points[2])
VX <- cbind(rep(vx, each = length(vy)))
VY <- cbind(rep(vy, length(vx)))
VXY <- cbind(VX,VY)
# In/out mask of the grid, transposed below to match fhat$estimate.
Ind <- matrix(inside.gpc.poly(x = VX, y = VY, polyregion = polygon), length(vy), length(vx))
f0 <- fhat
f0$estimate[t(Ind) == 0] <- NA
list(
X = fhat$eval.points[[1]],
Y = fhat$eval.points[[2]],
Z = fhat$estimate,
ZNA = f0$estimate,
H = fhat$H,
# Bug fix: was fhat$W -- ks::kde stores the weights in lowercase `w`
# (cf. sKDE above), so the old code always returned NULL here.
W = fhat$w)
}
# ---------------------------------------------------------------------------
# Driver: estimate the density of the points in Layer1 over the polygon
# Layer2 (methode 0 = edge-corrected sKDE, methode 1 = plain sKDE_without_c),
# resample the masked grid ZNA onto a regular X/Y grid and return it as a
# raster (`Output`) in Layer1's CRS.
# ---------------------------------------------------------------------------
points<-coordinates(Layer1)
polygon<-Layer2
if(methode==0){
estimate <- sKDE(U = points, polygon = polygon,
optimal=TRUE, parallel = FALSE)
}
if(methode==1){
estimate <- sKDE_without_c(U = points, polygon = polygon,
optimal=TRUE)
}
matrix<-cbind()
MinX<-min(estimate$X)
MinY<-min(estimate$Y)
MaxX<-max(estimate$X)
MaxY<-max(estimate$Y)
Seqx<-seq(MinX, MaxX, by=((MaxX - MinX)/(length(estimate$X) - 1)))
# Bug fix: the step of the y sequence divided by length(estimate$X); on a
# non-square evaluation grid that produced the wrong number of y values.
Seqy<-seq(MinY, MaxY, by=((MaxY - MinY)/(length(estimate$Y) - 1)))
MSeqx<-rep(Seqx, length(Seqy))
MSeqy<-rep(Seqy, length(Seqx))
MSeqy <- sort(MSeqy, decreasing=F)
Grille <- data.frame(X=MSeqx, Y=MSeqy)
ZNA<-as.data.frame(estimate$ZNA)
# Flatten ZNA column by column so cell order matches the grid built above
# (x varying fastest, then y).
for(i in 1:length(Seqy)){
for(j in 1:length(Seqx)){
matrix<-rbind(matrix,ZNA[j,i])
}
}
Grille<-cbind(Grille,as.numeric(matrix))
coordinates(Grille)=c("X","Y")
gridded(Grille)<-TRUE
library(raster)
result<-raster(Grille,layer=1, values=TRUE)
# The output raster inherits the CRS of the input point layer.
proj4string(Layer1)->crs
proj4string(result)<-crs
Output=result
141 |
--------------------------------------------------------------------------------
/rscripts/Inverse_Distance_Weigthing.rsx:
--------------------------------------------------------------------------------
##Basic statistics=group
##Layer1= vector
##Layer2=raster
##ponderation= number 1
##output= output vector
library(raster)
library(sp)

# ---------------------------------------------------------------------------
# For every point of Layer1, compute a weighted mean of the raster values of
# Layer2 within an annulus of 100-1500 m around the point.  `ponderation`
# selects the distance-weight kernel (codes 0..6); code 7 computes all seven
# kernels at once, one output column per kernel.  Cells with NA or zero
# values are skipped (zero is treated as nodata here).
# ---------------------------------------------------------------------------

# Weight of one raster cell at distance d for kernel code `mode`.
# Unknown codes get weight 0, which reproduces the original behaviour
# (an all-NaN "attribut" column).
idw_weight <- function(mode, d){
if (mode == 0) return(1/d^2)
if (mode == 1) return(1/(1+d^2))
if (mode == 2) return((1-0.102)/(1+(403/d)^2))
if (mode == 3) return((1-0.102)*exp(-403/d))
if (mode == 4) return((1-0.102)*(1-1.5*(403/d)+0.5*(403/d)^3))
if (mode == 5) {
# Complement of a log-normal CDF (mu = 1.76, sigma = 0.394).
integrand <- function(x) {(1/(0.394*sqrt(2*pi)))*exp(-0.5*((log(x)-1.76)/0.394)^2)}
int<-integrate(integrand, lower = 0, upper = d)
return(1-int$value)
}
if (mode == 6) {
alpha<-(-2)*d+2
return(1/(1+exp(-alpha)))
}
0
}

p1=coordinates(Layer1)
# Perf fix: the raster read and value flattening were inside the per-point
# loop (re-reading the file for every feature) although loop-invariant.
r <- readGDAL(Layer2@file@name)
z<-as.numeric(unlist(r@data))

# Code 7 evaluates every kernel at once; otherwise only the selected one.
modes <- if (ponderation == 7) 0:6 else ponderation

result<-cbind()
for (j in 1:dim(p1)[1]){
point<-p1[j,]
dist <- distanceFromPoints(r, point)
# Cells closer than 1500 m are candidates ...
position<-which(dist@data@values<1500)
dist_f<-dist@data@values[position]
a <- rep(0, length(modes))
b <- rep(0, length(modes))
# seq_along() (not 1:length()) so an empty neighbourhood is skipped cleanly.
for (i in seq_along(position)){
# ... but only those farther than 100 m with a usable (non-NA, non-zero) value.
if (dist_f[i] > 100 && !is.na(z[position[i]]) && z[position[i]] != 0){
for (m in seq_along(modes)){
w <- idw_weight(modes[m], dist_f[i])
a[m] <- a[m] + z[position[i]]*w
b[m] <- b[m] + w
}
}
}
# Weighted mean per kernel; 0/0 yields NaN when no cell qualified.
result<-rbind(result, cbind(as.numeric(point[1]), as.numeric(point[2]), t(a/b)))
}
if (ponderation == 7){
colnames(result)<-c("X","Y","1/d","1/(d+1)","C_ratio","C_exp","C_sph","C_lit","logit")
} else {
colnames(result)<-c("X","Y","attribut")
}
matrix<-cbind(result[,1],result[,2])
matrix<-as.matrix(matrix)
result<-SpatialPointsDataFrame(matrix, as.data.frame(result, row.names=NULL))
# Output points inherit the CRS of the input vector layer.
proj4string(Layer1)->crs
proj4string(result)<-crs
output<-result
159 |
--------------------------------------------------------------------------------
/models/mult.model:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "inputs": {
4 | "RASTERLAYER_R1": {
5 | "values": {
6 | "pos": {
7 | "values": {
8 | "y": 91.0,
9 | "x": 158.0
10 | },
11 | "class": "point"
12 | },
13 | "param": {
14 | "values": {
15 | "isAdvanced": false,
16 | "name": "RASTERLAYER_R1",
17 | "value": null,
18 | "exported": null,
19 | "hidden": false,
20 | "optional": false,
21 | "description": "r1"
22 | },
23 | "class": "processing.core.parameters.ParameterRaster"
24 | }
25 | },
26 | "class": "processing.modeler.ModelerAlgorithm.ModelerParameter"
27 | },
28 | "RASTERLAYER_R2": {
29 | "values": {
30 | "pos": {
31 | "values": {
32 | "y": 190.0,
33 | "x": 399.0
34 | },
35 | "class": "point"
36 | },
37 | "param": {
38 | "values": {
39 | "isAdvanced": false,
40 | "name": "RASTERLAYER_R2",
41 | "value": null,
42 | "exported": null,
43 | "hidden": false,
44 | "optional": false,
45 | "description": "r2"
46 | },
47 | "class": "processing.core.parameters.ParameterRaster"
48 | }
49 | },
50 | "class": "processing.modeler.ModelerAlgorithm.ModelerParameter"
51 | }
52 | },
53 | "group": "Example models",
54 | "name": "Multiply 2 layers",
55 | "algs": {
56 | "SAGARASTERCALCULATOR_1": {
57 | "values": {
58 | "name": "SAGARASTERCALCULATOR_1",
59 | "paramsFolded": true,
60 | "outputs": {
61 | "RESULT": {
62 | "values": {
63 | "description": "out",
64 | "pos": {
65 | "values": {
66 | "y": 443.0,
67 | "x": 504.0
68 | },
69 | "class": "point"
70 | }
71 | },
72 | "class": "processing.modeler.ModelerAlgorithm.ModelerOutput"
73 | }
74 | },
75 | "outputsFolded": true,
76 | "pos": {
77 | "values": {
78 | "y": 332.0,
79 | "x": 279.0
80 | },
81 | "class": "point"
82 | },
83 | "dependencies": [],
84 | "params": {
85 | "FORMULA": "a*b",
86 | "XGRIDS": [
87 | {
88 | "values": {
89 | "name": "RASTERLAYER_R1"
90 | },
91 | "class": "processing.modeler.ModelerAlgorithm.ValueFromInput"
92 | },
93 | {
94 | "values": {
95 | "name": "RASTERLAYER_R2"
96 | },
97 | "class": "processing.modeler.ModelerAlgorithm.ValueFromInput"
98 | }
99 | ]
100 | },
101 | "active": true,
102 | "consoleName": "saga:rastercalculator",
103 | "description": "Raster calculator"
104 | },
105 | "class": "processing.modeler.ModelerAlgorithm.Algorithm"
106 | }
107 | }
108 | },
109 | "class": "processing.modeler.ModelerAlgorithm.ModelerAlgorithm"
110 | }
--------------------------------------------------------------------------------
/scripts/Extract_raster_values_to_shapefile.py:
--------------------------------------------------------------------------------
##Raster=group
##Input_raster=raster
##Input_vector=vector
##Transform_vector_to_raster_CRS=boolean
##Output_layer=output vector

import os
from osgeo import gdal, ogr, osr
from processing.core.GeoAlgorithmExecutionException import \
    GeoAlgorithmExecutionException
from processing.tools.raster import *

# Copy every feature of the input point vector into a new shapefile, then add
# one attribute column per raster band holding the raster value sampled at
# each point (optionally reprojecting the points into the raster CRS first).

raster = gdal.Open(Input_raster)

rasterBaseName = os.path.splitext(os.path.basename(Input_raster))[0]

bandCount = raster.RasterCount
rasterXSize = raster.RasterXSize
rasterYSize = raster.RasterYSize
geoTransform = raster.GetGeoTransform()
rasterCRS = osr.SpatialReference()
rasterCRS.ImportFromWkt(raster.GetProjectionRef())

vector = ogr.Open(Input_vector, False)
layer = vector.GetLayer(0)
featureCount = layer.GetFeatureCount()
if featureCount == 0:
    raise GeoAlgorithmExecutionException(
        'There are no features in input vector.')

vectorCRS = layer.GetSpatialRef()

drv = ogr.GetDriverByName('ESRI Shapefile')
if drv is None:
    raise GeoAlgorithmExecutionException(
        "'ESRI Shapefile' driver is not available.")

outputDataset = drv.CreateDataSource(Output_layer)
if outputDataset is None:
    raise GeoAlgorithmExecutionException('Creation of output file failed.')

outputLayer = outputDataset.CreateLayer(
    str(os.path.splitext(os.path.basename(Output_layer))[0]),
    vectorCRS, ogr.wkbPoint)
if outputLayer is None:
    raise GeoAlgorithmExecutionException('Layer creation failed.')

# Replicate the input layer's attribute schema in the output.
featureDefn = layer.GetLayerDefn()
for i in xrange(featureDefn.GetFieldCount()):
    fieldDefn = featureDefn.GetFieldDefn(i)
    if outputLayer.CreateField(fieldDefn) != 0:
        raise GeoAlgorithmExecutionException("Can't create field '%s'."
                                             % fieldDefn.GetNameRef())

# One new real-valued column per band; names fit the shapefile 10-character
# field limit (8 chars of the raster base name + '_' + band number).
columnName = str(rasterBaseName[:8])
for i in xrange(bandCount):
    fieldDefn = ogr.FieldDefn(columnName + '_' + str(i + 1), ogr.OFTReal)
    fieldDefn.SetWidth(18)
    fieldDefn.SetPrecision(8)
    if outputLayer.CreateField(fieldDefn) != 0:
        raise GeoAlgorithmExecutionException("Can't create field '%s'."
                                             % fieldDefn.GetNameRef())

outputFeature = ogr.Feature(outputLayer.GetLayerDefn())

current = 0
total = bandCount + featureCount * bandCount + featureCount

# Pass 1: copy all features into the output dataset.
layer.ResetReading()
feature = layer.GetNextFeature()
while feature is not None:
    current += 1
    # Bug fix: was int(current * total), which overshoots 100% immediately;
    # the percentage is current / total scaled to 0-100.
    progress.setPercentage(int(current * 100 / total))

    outputFeature.SetFrom(feature)
    if outputLayer.CreateFeature(outputFeature) != 0:
        raise GeoAlgorithmExecutionException('Failed to add feature.')
    feature = layer.GetNextFeature()

vector.Destroy()
outputFeature.Destroy()
outputDataset.Destroy()

# Pass 2: reopen the output read-write and fill in the sampled band values.
vector = ogr.Open(Output_layer, True)
layer = vector.GetLayer(0)

if Transform_vector_to_raster_CRS:
    coordTransform = osr.CoordinateTransformation(vectorCRS, rasterCRS)
    if coordTransform is None:
        raise GeoAlgorithmExecutionException(
            'Error while creating coordinate transformation.')

for i in xrange(bandCount):
    current += 1
    progress.setPercentage(int(current * 100 / total))

    rasterBand = raster.GetRasterBand(i + 1)
    try:
        data = rasterBand.ReadAsArray()
    except:
        raise GeoAlgorithmExecutionException(
            'Error reading raster data. File might be too big.')
    layer.ResetReading()
    feature = layer.GetNextFeature()
    while feature is not None:
        current += 1
        progress.setPercentage(int(current * 100 / total))

        geometry = feature.GetGeometryRef()
        x = geometry.GetX()
        y = geometry.GetY()
        if Transform_vector_to_raster_CRS:
            pnt = coordTransform.TransformPoint(x, y, 0)
            x = pnt[0]
            y = pnt[1]
        (rX, rY) = mapToPixel(x, y, geoTransform)
        rX = int(rX)
        rY = int(rY)
        # Skip points outside the raster extent; the negative-index guard is
        # required because negative numpy indices silently wrap around.
        if rX < 0 or rY < 0 or rX >= rasterXSize or rY >= rasterYSize:
            feature = layer.GetNextFeature()
            continue
        value = data[rY, rX]

        feature.SetField(columnName + '_' + str(i + 1), float(value))
        if layer.SetFeature(feature) != 0:
            raise GeoAlgorithmExecutionException('Failed to update feature.')

        feature = layer.GetNextFeature()

    rasterBand = None

raster = None
vector.Destroy()
132 |
--------------------------------------------------------------------------------