├── .gitignore ├── .gitattributes ├── CDSterms.png ├── deps └── build.jl ├── paper ├── Mattsson et al. 2019 - An autopilot for energy models.pdf └── Mattsson et al. 2019 - Supplementary - An autopilot for energy models.pdf ├── LICENSE ├── src ├── syntheticdemand_paperfigures.jl ├── GlobalEnergyGIS.jl ├── maketempera5.jl ├── coordinatedescent.jl ├── GeoArray.jl ├── external_scripts_GMT.jl ├── makewindera5.jl ├── makedistances.jl ├── rasterize_shapefiles.jl ├── syntheticdemand_training.jl ├── GIStemp.jl ├── era5download.jl ├── sspregiondefinitions.jl ├── downloaddatasets.jl ├── solarposition.jl ├── make_regions.jl ├── map_test_plots.jl ├── makesolarera5.jl ├── syntheticdemand_inputdata.jl ├── GIShydro.jl ├── helperfunctions.jl ├── make_auxiliary_datasets.jl └── GISsolar.jl └── Project.toml /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | deps/build.log 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /CDSterms.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/niclasmattsson/GlobalEnergyGIS/HEAD/CDSterms.png -------------------------------------------------------------------------------- /deps/build.jl: -------------------------------------------------------------------------------- 1 | using Conda 2 | 3 | Conda.add_channel("conda-forge") 4 | Conda.add("cdsapi") 5 | -------------------------------------------------------------------------------- /paper/Mattsson et al. 2019 - An autopilot for energy models.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/niclasmattsson/GlobalEnergyGIS/HEAD/paper/Mattsson et al. 2019 - An autopilot for energy models.pdf -------------------------------------------------------------------------------- /paper/Mattsson et al. 2019 - Supplementary - An autopilot for energy models.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/niclasmattsson/GlobalEnergyGIS/HEAD/paper/Mattsson et al. 2019 - Supplementary - An autopilot for energy models.pdf -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Niclas Mattsson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/syntheticdemand_paperfigures.jl: -------------------------------------------------------------------------------- 1 | # run this file using "include" after adding Plots to the default Julia environment 2 | using GlobalEnergyGIS, Plots, Plots.PlotMeasures 3 | 4 | function plotlines(models, startday; variables=defaultvariables, era_year=2018) 5 | normdemand, normdemand_predicted, regionlist = getcvdata(models; variables=defaultvariables, era_year=era_year) 6 | regionlist[5] = "Bosnia & Herz." 7 | plot(normdemand, linecolor = "#0000FFFF", size=(1800,1350), lw=4, layout = (4,11), label="", xticks=[0,4000,8000], 8 | title=reshape(regionlist, (1,44)), ylims=(0.5,1.7), titlefont=(13,:Calibri), tickfont=(11,:Calibri), bottom_margin = 10px) 9 | display(plot!(normdemand_predicted, linecolor = "#FF00007F", lw=4, label="")) 10 | # alternative colors "#00FC" and "#D008" 11 | plot(normdemand[startday*24 .+ (1:24*7), :], linecolor = "#0000FFFF", size=(1800,1350), layout = (4,11), label="", 12 | title=reshape(regionlist, (1,44)), ylims=(0.5,1.7), titlefont=(13,:Calibri), tickfont=(11,:Calibri), bottom_margin = 10px) 13 | plot!(normdemand_predicted[startday*24 .+ (1:24*7), :], linecolor = "#FF00007F", label="") 14 | end 15 | 16 | plotly() 17 | 18 | models = crossvalidate(variables=defaultvariables, nrounds=1000, max_depth=7, eta=0.005, subsample=0.05, metrics=["mae"]) 19 | plotlines(models, 123) 20 | -------------------------------------------------------------------------------- /src/GlobalEnergyGIS.jl: -------------------------------------------------------------------------------- 1 | module GlobalEnergyGIS 2 | 3 | export GISwind, GISsolar, GIShydro, GIStemp, GISturbines, makedistances, annualwindindex 4 | 5 | using MAT, HDF5, ProgressMeter, Random, Interpolations, BenchmarkTools, Images, 6 | Statistics, DelimitedFiles, Dates, NCDatasets, JLD, Parameters, ImageSegmentation, 7 | StatsBase, CSV, Distances, Printf, TimeZones, DataFrames 8 | 9 | include("GeoArray.jl") 10 | include("rasterize_shapefiles.jl") 11 | include("make_auxiliary_datasets.jl") 12 | include("era5download.jl") 13 | include("helperfunctions.jl") 14 | include("make_regions.jl") 15 | include("regiondefinitions.jl") 16 | include("solarposition.jl") 17 | include("makewindera5.jl") 18 | include("makesolarera5.jl") 19 | include("maketempera5.jl") 20 | include("makedistances.jl") 21 | include("GISwind.jl") 22 | include("GISsolar.jl") 23 | include("GIShydro.jl") 24 | include("GIStemp.jl") 25 | include("GISturbines.jl") 26 | include("downloaddatasets.jl") 27 | include("sspregiondefinitions.jl") 28 | include("mapping.jl") 29 | include("map_test_plots.jl") 30 | include("syntheticdemand_inputdata.jl") 31 | include("syntheticdemand_training.jl") 32 | include("readclimatedata.jl") 33 | 34 | function GISsequence(regionname, regionmat) 35 | saveregions(regionname, regionmat) 36 | makedistances(regionname) 37 | createmaps(regionname) 38 | GISsolar(gisregion=regionname, plotmasks=true) 39 | GISwind(gisregion=regionname, plotmasks=true) 40 | GIShydro(gisregion=regionname) 41 | predictdemand(gisregion=regionname, sspscenario="ssp2-26", sspyear=2050, era_year=2018) 42 | end 43 | 44 | end 45 | -------------------------------------------------------------------------------- /src/maketempera5.jl: -------------------------------------------------------------------------------- 1 | export maketempera5 2 | 3 | # Can optionally zero cells that are zero in the Global Wind Atlas to save a lot of disk space. 4 | function maketempera5(; year=2018, land_cells_only=true) 5 | hours = 24*Dates.daysinyear(year) 6 | gridsize = (1280,640) 7 | 8 | datafolder = getconfig("datafolder") 9 | downloadsfolder = joinpath(datafolder, "downloads") 10 | 11 | filename = joinpath(datafolder, "era5temp$year.h5") 12 | isfile(filename) && error("File $filename exists in $datafolder, please delete or rename manually.") 13 | 14 | land = imresize(JLD.load(joinpath(datafolder, "landcover.jld"), "landcover"), gridsize) 15 | 16 | println("Creating HDF5 file: $filename") 17 | h5open(filename, "w") do file 18 | group = file["/"] 19 | dataset_temp = create_dataset(group, "temp", datatype(Float32), dataspace(hours,gridsize...), chunk=(hours,16,16), blosc=3) 20 | dataset_meantemp = create_dataset(group, "meantemp", datatype(Float32), dataspace(gridsize...), chunk=gridsize, blosc=3) 21 | 22 | totaltemp = zeros(gridsize) 23 | hour = 1 24 | 25 | count = 0 26 | for month = 1:12, monthhalf = 1:2 27 | if monthhalf == 1 28 | firstday, lastday = "01", "15" 29 | else 30 | firstday = "16" 31 | lastday = Dates.daysinmonth(Date("$year-$month")) 32 | end 33 | monthstr = lpad(month,2,'0') 34 | date = "$year-$monthstr-$firstday/$year-$monthstr-$lastday" 35 | erafile = joinpath(downloadsfolder, "temp$year-$monthstr$firstday-$monthstr$lastday.nc") 36 | 37 | count += 1 38 | println("\nFile $count of 24:") 39 | println("Reading temperatures from $erafile...") 40 | # Permute dimensions to get hours as dimension 1 41 | ncdataset = Dataset(erafile) 42 | temp = permutedims((ncdataset["t2m"][:,:,:] .- 273.15) .* (land .> 0), [3,1,2]) 43 | 44 | totaltemp += sumdrop(temp, dims=1) 45 | len = size(temp,1) 46 | println("Writing to $filename...") 47 | dataset_temp[hour:hour+len-1,:,:] = temp 48 | hour += len 49 | end 50 | println("\nWriting meantemp to $filename...") 51 | dataset_meantemp[:,:] = totaltemp/hours 52 | end 53 | nothing 54 | end 55 | -------------------------------------------------------------------------------- /Project.toml: -------------------------------------------------------------------------------- 1 | name = "GlobalEnergyGIS" 2 | uuid = "31bfc850-377e-11e9-327a-adf9289f3a6c" 3 | authors = ["niclasmattsson <33093224+niclasmattsson@users.noreply.github.com>"] 4 | version = "0.1.0" 5 | 6 | [deps] 7 | ArchGDAL = "c9ce4bd3-c3d5-55b8-8973-c0e20141b8c3" 8 | BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" 9 | CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" 10 | CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" 11 | CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597" 12 | ColorSchemes = "35d6a980-a343-548e-a6ea-1d62b119f2f4" 13 | Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" 14 | DataDeps = "124859b0-ceae-595e-8997-d05f6a7a8dfe" 15 | DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" 16 | Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" 17 | DelaunayTriangulation = "927a84f5-c5f4-47a5-9785-b46e178433df" 18 | DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab" 19 | Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" 20 | Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6" 21 | FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" 22 | GDAL = "add2ef01-049f-52c4-9ee2-e494f65e021a" 23 | GDAL_jll = "a7073274-a066-55f0-b90d-d619367d196c" 24 | GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a" 25 | GeoDataFrames = "62cb38b5-d8d2-4862-a48e-6a340996859f" 26 | GeoMakie = "db073c08-6b98-4ee5-b6a4-5efafb3259c6" 27 | HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" 28 | ImageFiltering = "6a3955dd-da59-5b1f-98d4-e7296123deb5" 29 | ImageSegmentation = "80713f31-8817-5129-9cf8-209ff8fb23e1" 30 | Images = "916415d5-f1e6-5110-898d-aaa5f9f070e0" 31 | Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" 32 | JLD = "4138dd39-2aa7-5051-a626-17a0bb65d9c8" 33 | MAT = "23992714-dd62-5051-b70f-ba57cb901cac" 34 | NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab" 35 | PROJ_jll = "58948b4f-47e0-5654-a9ad-f609743f8632" 36 | Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a" 37 | Parsers = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" 38 | Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" 39 | Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" 40 | ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca" 41 | Proj = "c94c279d-25a6-4763-9509-64d165bea63e" 42 | PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" 43 | Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" 44 | Rasters = "a3a2b9e3-a471-40c9-b274-f788e487c689" 45 | Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" 46 | StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" 47 | TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" 48 | UrlDownload = "856ac37a-3032-4c1c-9122-f86d88358c8b" 49 | XGBoost = "009559a3-9522-5dbb-924b-0b6ed2b22bb9" 50 | XLSX = "fdbf4ff8-1666-58a4-91e7-1b58723a45e0" 51 | 52 | [compat] 53 | GDAL = "1.7.1" 54 | -------------------------------------------------------------------------------- /src/coordinatedescent.jl: -------------------------------------------------------------------------------- 1 | function choosedirection(f::Function, index, deltas) 2 | best, i = findmin(f(index + d) for d in deltas) 3 | dir = deltas[i] 4 | return dir, best, index + dir 5 | end 6 | 7 | function descend(f::Function, dir, best, index) 8 | while true 9 | newindex = index + dir 10 | val = f(newindex) 11 | if val < best 12 | best = val 13 | index = newindex 14 | else 15 | return best, index 16 | end 17 | end 18 | end 19 | 20 | coordinate_descent(A::AbstractArray, index) = 21 | coordinate_descent(ndx -> get(A, ndx, typemax(eltype(A))), index) 22 | 23 | # Custom version of findmin(A) for a matrix A which is sampled from a convex function. 24 | # Expanded to support a function f to evaluate (to avoid precomputing all elements of A). 25 | # https://discourse.julialang.org/t/is-there-a-findmin-a-that-uses-local-search-instead-of-visiting-all-elements-of-matrix-a/74045/4 26 | function coordinate_descent(f::Function, index) 27 | ci = CartesianIndex(index) 28 | same = zero(ci) 29 | deltas = -oneunit(ci):oneunit(ci) 30 | while true 31 | dir, best, ci = choosedirection(f, ci, deltas) 32 | dir == same && return best, ci 33 | best, ci = descend(f, dir, best, ci) 34 | end 35 | end 36 | 37 | function testdescent() 38 | nc = Dataset(simname("mohc", 85, "v", 100, 2050, false)) 39 | xylon = nc["lon"][:,:] 40 | xylat = nc["lat"][:,:] 41 | lons = -10.5:0.1:32 42 | lats = 34.5:0.1:71.5 43 | indices = zeros(CartesianIndex{2}, length(lons), length(lats)) 44 | distances = zeros(length(lons), length(lats)) 45 | lon, lat = 15, 60 46 | # d = @time @. (xylon - lon)^2 + (xylat - lat)^2 47 | # dmin = @time findmin(d) 48 | # f = ndx -> (xylon[ndx] - lon)^2 + (xylat[ndx] - lat)^2 49 | # f = ndx -> (get(xylon, ndx, Inf) - lon)^2 + (get(xylat, ndx, Inf) - lat)^2 50 | f = ndx -> checkbounds(Bool, xylon, ndx) ? (xylon[ndx] - lon)^2 + (xylat[ndx] - lat)^2 : Inf 51 | # dmin = @time coordinate_descent(f, CartesianIndex(3,3)) 52 | dmin = @btime coordinate_descent($f, $CartesianIndex(250,310)) 53 | end 54 | 55 | # function choosedirection(A::AbstractArray, index, deltas) 56 | # best, i = findmin(get(A, index + d, typemax(eltype(A))) for d in deltas) 57 | # dir = deltas[i] 58 | # return dir, best, index + dir 59 | # end 60 | 61 | # function descend(A::AbstractArray, dir, best, index) 62 | # while true 63 | # newindex = index + dir 64 | # val = get(A, newindex, typemax(eltype(A))) 65 | # if val < best 66 | # best = val 67 | # index = newindex 68 | # else 69 | # return best, index 70 | # end 71 | # end 72 | # end 73 | 74 | # function coordinate_descent(A::AbstractArray, index) 75 | # ci = CartesianIndex(index) 76 | # same = zero(ci) 77 | # deltas = -oneunit(ci):oneunit(ci) 78 | # while true 79 | # dir, best, ci = choosedirection(A, ci, deltas) 80 | # dir == same && return best, ci 81 | # best, ci = descend(A, dir, best, ci) 82 | # end 83 | # end 84 | -------------------------------------------------------------------------------- /src/GeoArray.jl: -------------------------------------------------------------------------------- 1 | export GeoArray, lonlat_index, lookup, getlon, getlat, lonindex, latindex, crop 2 | 3 | const EPS = 1e-8 4 | 5 | # Assumes the latitudes of the internal array are in reverse order. 6 | # Works for 3D arrays if lons and lats are in the dimensions 1 & 2 (see lonlat_index). 7 | # Maybe add GeoArray(...; lonlatdims=(1,2)) keyword argument to make this more general. 8 | struct GeoArray{T,N} <: AbstractArray{T,N} 9 | arr::AbstractArray{T,N} 10 | res::Float64 11 | lonlim::Tuple{Float64,Float64} 12 | latlim::Tuple{Float64,Float64} 13 | 14 | # Check if lon-lat limits are valid before constructing. 15 | function GeoArray(arr, res, lonlim, latlim) 16 | calcsize = (lonlim[2] - lonlim[1]) / res, (latlim[2] - latlim[1]) / res 17 | if all(size(arr)[1:2] .≈ calcsize) 18 | return new{eltype(arr), ndims(arr)}(arr, res, lonlim, latlim) 19 | else 20 | error("lon-lat limits incompatible with GeoArray size") 21 | end 22 | end 23 | end 24 | 25 | GeoArray(arr, res, extent::Vector{<:Real}) = GeoArray(arr, res, (extent[1],extent[3]), (extent[2],extent[4])) 26 | GeoArray(arr, res) = GeoArray(arr, res, (-180, 180), (-90, 90)) 27 | 28 | function Base.show(io::IO, ::MIME"text/plain", a::GeoArray) 29 | summary(io, a) 30 | println(io, ": res = $(a.res), lonlim = $(a.lonlim), latlim = $(a.latlim)") 31 | print("Array: ") 32 | show(io, "text/plain", a.arr) 33 | end 34 | 35 | Base.size(a::GeoArray) = size(a.arr) 36 | Base.@propagate_inbounds Base.getindex(a::GeoArray, i::Int) = getindex(a.arr, i) 37 | Base.IndexStyle(::Type{<:GeoArray{T,N}}) where T where N = IndexStyle(Array{T,N}) 38 | 39 | getlon(a::GeoArray, i::Int) = a.lonlim[1] + a.res*(i - 0.5) # i - 1 + 0.5 40 | getlat(a::GeoArray, i::Int) = a.latlim[2] - a.res*(i - 0.5) 41 | 42 | lonindex(a::GeoArray, lon) = floor(Int, (lon - a.lonlim[1])/a.res) + 1 43 | latindex(a::GeoArray, lat) = floor(Int, (a.latlim[2] - lat)/a.res) + 1 44 | 45 | lonlat_index(a::GeoArray, lon, lat, t; restrict=false) = CartesianIndex(lonlat_index(a, lon, lat; restrict), t) 46 | function lonlat_index(a::GeoArray, lon, lat; restrict=false) 47 | ndx = CartesianIndex(lonindex(a, lon), latindex(a, lat)) 48 | return restrict ? clamp(ndx, zero(ndx), CartesianIndex(size(a)[1:2])) : ndx 49 | end 50 | 51 | lookup(a::GeoArray, args...; restrict=false) = a[lonlat_index(a, args...; restrict)] 52 | 53 | lon_indices_within(a::GeoArray, lonlow, lonhigh) = 54 | max(1, lonindex(a, lonlow)):min(size(a,1), lonindex(a, lonhigh) - 1) 55 | lat_indices_within(a::GeoArray, latlow, lathigh) = 56 | max(1, latindex(a, lathigh)):min(size(a,2), latindex(a, latlow) - 1) 57 | 58 | function crop(a::GeoArray, targetextent::Vector{<:Real}) 59 | ilons = lonindex(a, targetextent[1]) : lonindex(a, targetextent[3]) 60 | ilats = latindex(a, targetextent[4]) : latindex(a, targetextent[2]) 61 | newextent = [getlon(a, ilons[1]) - a.res/2, getlat(a, ilats[end]) - a.res/2, 62 | getlon(a, ilons[end]) + a.res/2, getlat(a, ilats[1]) + a.res/2] 63 | return GeoArray(a[ilons, ilats], a.res, newextent) 64 | end 65 | 66 | # Also write: resize_categorical(), rescale() 67 | -------------------------------------------------------------------------------- /src/external_scripts_GMT.jl: -------------------------------------------------------------------------------- 1 | # Run this file with include() after adding packages below to the environment. 2 | # E.g. includet("./GlobalEnergyGIS/src/external_scripts.jl") 3 | 4 | using GMT 5 | 6 | G = GlobalEnergyGIS 7 | 8 | function geotest8() 9 | pol, nodes = sphtriangulate("@gshhs_c.txt", voronoi=:v, skip=true, nodes=true) 10 | 11 | # Compute distances in km 12 | Gtt = sphdistance(pol, region=:global360, inc=1, voronoi=true, nodes=nodes, dist_unit=:k) 13 | t_cpt = makecpt(cmap=:hot, range=(0,3500)) 14 | 15 | # Make a basic image plot and overlay contours, Voronoi polygons and coastlines 16 | grdimage(Gtt, proj=(name=:ortho, center=(-140,30)), figsize=16, xshift=2, yshift=5) 17 | grdcontour!(Gtt, cont=500, annot=(int=1000, labels=(font=(10,:Helvetica,:white),)), 18 | range=500, labels=(Line=[0 90 203 -10; 175 60 170 -30; -50 30 220 -5],), 19 | pen=((annot=true, pen=(0.75,:white)), (contour=true, pen=(0.25,:white))) ) 20 | 21 | GMT.plot!(pol, pen=(0.25, :green, :dotted)) 22 | coast!(shore=1, land=:steelblue, area=(0,1,1), 23 | frame=(annot=30, grid=30, title="Distances from GSHHG crude coastlines"), show=true) 24 | end 25 | 26 | function geotest9() 27 | table_5 = gmtread("@Table_5_11.txt") # The data used in this example 28 | T = gmtinfo(table_5, nearest_multiple=(dz=25, col=2)) 29 | makecpt(color=:jet, range=T.text[1][3:end]) # Make it also the current cmap 30 | 31 | subplot(grid=(2,2), limits=(0,6.5,-0.2,6.5), col_axes=(bott=true,), row_axes=(left=true,), 32 | figsize=8, margins=0.1, panel_size=(8,0), tite="Delaunay Triangulation") 33 | # First draw network and label the nodes 34 | net_xy = triangulate(table_5, M=true) 35 | GMT.plot(net_xy, lw=:thinner) 36 | GMT.plot(table_5, marker=:circle, ms=0.3, fill=:white, MarkerLine=:thinnest) 37 | GMT.text(table_5, font=6, rec_number=0) 38 | 39 | # Then draw network and print the node values 40 | GMT.plot(net_xy, lw=:thinner, panel=(1,2)) 41 | GMT.plot(table_5, marker=:circle, ms=0.08, fill=:black) 42 | GMT.text(table_5, zvalues=true, font=6, justify=:LM, fill=:white, pen="", clearance="1p", offset=("6p",0), noclip=true) 43 | 44 | # Finally color the topography 45 | GMT.contour(table_5, pen=:thin, mesh=(:thinnest,:dashed), labels=(dist=2.5,), panel=(2,1)) 46 | GMT.contour(table_5, colorize=true, panel=(2,2)) 47 | subplot("show") 48 | end 49 | 50 | function ehub500() 51 | buses = G.CSV.read(G.in_datafolder("Bus_data_EHUB500 - buses_original.csv"), G.DataFrame) 52 | unique!(buses, ["x-coordinate", "y-coordinate"]) 53 | xy = Matrix(buses[:, ["x-coordinate", "y-coordinate"]]) 54 | 55 | # bbox = collect(Iterators.flatten(extrema(uxy, dims=1))) 56 | bbox = [4, 32, 54.5, 71.5] 57 | ds = GMTdataset(xy) 58 | ds.bbox = bbox 59 | ds.ds_bbox = bbox 60 | 61 | pol, nodes = sphtriangulate(ds, voronoi=:v, skip=true, nodes=true) 62 | 63 | t_cpt = makecpt(cmap=:categorical, range=(0,size(xy,1),1), wrap=:w) 64 | GMT.plot(pol, proj=:moll, region=bbox, close=true, cmap=t_cpt, alpha=65, wrap=:w) 65 | GMT.scatter!(nodes, fill=:red, markersize="2p") 66 | coast!(land=nothing, DCW=(country="SE,NO,DK,FI", pen=(0.25,:black)), frame=(annot=:auto, ticks=:auto, grid=:auto), show=true) 67 | 68 | gmtwrite("ehub500_id.shp", pol) 69 | # gmtwrite("ehub500.geojson", pol) 70 | return pol 71 | end 72 | 73 | function ehub500_test() 74 | buses = G.CSV.read(G.in_datafolder("Bus_data_EHUB500 - buses_original.csv"), G.DataFrame) 75 | unique!(buses, ["x-coordinate", "y-coordinate"]) 76 | xy = Matrix(buses[:, ["x-coordinate", "y-coordinate", "bus_id"]]) 77 | 78 | # bbox = collect(Iterators.flatten(extrema(uxy, dims=1))) 79 | bbox = [4, 32, 54.5, 71.5, 5500, 90000] 80 | # ds = GMTdataset(xy) 81 | # ds.bbox = bbox 82 | # ds.ds_bbox = bbox 83 | ds = mat2ds(xy) 84 | 85 | pol, nodes = sphtriangulate(ds, voronoi=:v, skip=true, nodes="nodes.txt", verbose=true) 86 | 87 | gmtwrite("ehub500_id.shp", pol) 88 | # gmtwrite("test.geojson", pol) 89 | pol 90 | end 91 | 92 | function readshape() 93 | df = G.GDF.read("ehub500_id.shp") 94 | end 95 | -------------------------------------------------------------------------------- /src/makewindera5.jl: -------------------------------------------------------------------------------- 1 | export makewindera5, makemonthlywindera5 2 | 3 | # Can optionally zero cells that are zero in the Global Wind Atlas to save a lot of disk space. 4 | function makewindera5(; year=2018, windatlas_only=true) 5 | hours = 24*Dates.daysinyear(year) 6 | gridsize = (1280,640) 7 | 8 | datafolder = getconfig("datafolder") 9 | downloadsfolder = joinpath(datafolder, "downloads") 10 | 11 | filename = joinpath(datafolder, "era5wind$year.h5") 12 | isfile(filename) && error("File $filename exists in $datafolder, please delete or rename manually.") 13 | 14 | windatlas = reshape(imresize(getwindatlas(), gridsize), (1,gridsize...)) 15 | 16 | println("Creating HDF5 file: $filename") 17 | h5open(filename, "w") do file 18 | group = file["/"] 19 | dataset_wind = create_dataset(group, "wind", datatype(Float32), dataspace(hours,gridsize...), chunk=(hours,16,16), blosc=3) 20 | dataset_meanwind = create_dataset(group, "meanwind", datatype(Float32), dataspace(gridsize...), chunk=gridsize, blosc=3) 21 | 22 | totalwind = zeros(gridsize) 23 | hour = 1 24 | 25 | count = 0 26 | for month = 1:12, monthhalf = 1:2 27 | if monthhalf == 1 28 | firstday, lastday = "01", "15" 29 | else 30 | firstday = "16" 31 | lastday = Dates.daysinmonth(Date("$year-$month")) 32 | end 33 | monthstr = lpad(month,2,'0') 34 | date = "$year-$monthstr-$firstday/$year-$monthstr-$lastday" 35 | erafile = joinpath(downloadsfolder, "wind$year-$monthstr$firstday-$monthstr$lastday.nc") 36 | 37 | count += 1 38 | println("\nFile $count of 24:") 39 | println("Reading wind components from $erafile...") 40 | # Permute dimensions to get hours as dimension 1 (for efficient iteration in GISwind()) 41 | ncdataset = Dataset(erafile) 42 | u100 = permutedims(ncdataset["u100"][:,:,:], [3,1,2]) 43 | v100 = permutedims(ncdataset["v100"][:,:,:], [3,1,2]) 44 | 45 | println("Calculating absolute speed...") 46 | wind = replace(sqrt.(u100.^2 + v100.^2), missing => 0.0) .* (windatlas .> 0) 47 | 48 | totalwind = totalwind + sumdrop(wind, dims=1) 49 | len = size(wind,1) 50 | println("Writing to $filename...") 51 | dataset_wind[hour:hour+len-1,:,:] = wind 52 | hour += len 53 | end 54 | println("\nWriting meanwind to $filename...") 55 | dataset_meanwind[:,:] = totalwind/hours 56 | end 57 | nothing 58 | end 59 | 60 | # Unlike makemonthlysolarera5(), the wind version can't use monthly ERA5 variables 61 | # which take monthly averages of u and v components of wind speed. Doing so would 62 | # greatly underestimate absolute wind speeds in locations where wind direction 63 | # changes frequently. So instead we use ordinary hourly ERA5 data. This assumes 64 | # that every year between 1979-2019 has been downloaded (for wind). 65 | function makemonthlywindera5(; windatlas_only=true) 66 | years = 1979:2019 67 | nyears = length(years) 68 | nmonths = nyears*12 69 | gridsize = (1280,640) 70 | 71 | datafolder = getconfig("datafolder") 72 | filename = joinpath(datafolder, "era5monthlywind.h5") 73 | isfile(filename) && error("File $filename exists in $datafolder, please delete or rename manually.") 74 | 75 | println("Creating HDF5 file: $filename") 76 | h5open(filename, "w") do file 77 | group = file["/"] 78 | monthlywind = create_dataset(group, "monthlywind", datatype(Float32), dataspace(nmonths,gridsize...), chunk=(nmonths,16,16), blosc=3) 79 | annualwind = create_dataset(group, "annualwind", datatype(Float32), dataspace(nyears,gridsize...), chunk=(nyears,16,16), blosc=3) 80 | 81 | for (y, year) in enumerate(years) 82 | print("$year: ") 83 | options = WindOptions(merge(windoptions(), Dict(:era_year => year))) 84 | windatlas, _, meanwind, windspeed = read_wind_datasets(options, 1:36000, 1:18000) 85 | monthdays = [Dates.daysinmonth(Date("$year-$m")) for m in 1:12] 86 | lasthour = cumsum(24*monthdays) 87 | firsthour = [1; lasthour[1:end-1] .+ 1] 88 | for m = 1:12 89 | monthlywind[12*(y-1) + m,:,:] = 90 | mean(windspeed[firsthour[m]:lasthour[m], :, :], dims=1) 91 | end 92 | annualwind[y,:,:] = meanwind 93 | end 94 | end 95 | nothing 96 | end 97 | -------------------------------------------------------------------------------- /src/makedistances.jl: -------------------------------------------------------------------------------- 1 | function makedistances(gisregion; scenarioyear="ssp2_2050", res=0.01) 2 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(gisregion) 3 | numreg = length(regionlist) 4 | 5 | println("Finding area- and population-weighted region centers...") 6 | geocenters, popcenters = getregioncenters(regions, numreg, lonrange, latrange, res, scenarioyear) 7 | println("\nCalculating distances between centers...") 8 | distances = [greatcircledistance(Tuple(popcenters[r1,:]), Tuple(popcenters[r2,:])) for r1 = 1:numreg, r2 = 1:numreg] 9 | 10 | println("\nFinding neighboring regions for transmission connections...") 11 | println("...onshore...") 12 | connected = connectedregions(regions, numreg) 13 | println("...offshore...") 14 | connectedoffshore = connectedregions(offshoreregions, numreg) 15 | connectedoffshore[connected] .= false 16 | # regionpop = [sum(pop.==r) for r = 1:numreg] 17 | 18 | println("\nSaving results...") 19 | matopen(in_datafolder("output", "distances_$gisregion.mat"), "w") do file 20 | write(file, "distances", distances) 21 | write(file, "connected", connected) 22 | write(file, "connectedoffshore", connectedoffshore) 23 | write(file, "regionlist", string.(regionlist)) 24 | # write(file, "population", pop) 25 | # write(file, "demand", demand) 26 | write(file, "regioncenters_lon", popcenters[:,1]) 27 | write(file, "regioncenters_lat", popcenters[:,2]) 28 | end 29 | 30 | # Also save the region file in Matlab format 31 | # regions2matlab(gisregion) 32 | nothing 33 | end 34 | 35 | # returns great circle distance in km between points given as (lat,lon) tuples (in degrees). 36 | greatcircledistance(point1::Tuple, point2::Tuple) = haversine(point1, point2, 6371.0) 37 | 38 | function getregioncenters(regions, numreg, lonrange, latrange, res, scenarioyear) 39 | lats = (90-res/2:-res:-90+res/2)[latrange] # latitude values (pixel center) 40 | cellarea = rastercellarea.(lats, res) 41 | 42 | mkpath(in_datafolder("output")) 43 | 44 | pop = JLD.load(in_datafolder("population_$scenarioyear.jld"), "population")[lonrange,latrange] 45 | popdens = pop ./ cellarea' 46 | 47 | geocenters, popcenters = regioncenters(regions, numreg, popdens, lonrange, latrange, res) 48 | return geocenters, popcenters # column order (lat,lon) 49 | end 50 | 51 | function regioncenters(regions, numreg, popdens, lonrange, latrange, res) 52 | lons = (-180+res/2:res:180-res/2)[lonrange] # longitude values (pixel center) 53 | lats = (90-res/2:-res:-90+res/2)[latrange] # latitude values (pixel center) 54 | rows, cols = size(regions) 55 | geocenters = zeros(numreg,2) 56 | popcenters = zeros(numreg,2) 57 | counts = zeros(Int, numreg) 58 | popdenssum = zeros(numreg) 59 | for c = 1:cols 60 | lat = lats[c] 61 | for r = 1:rows 62 | reg = regions[r,c] 63 | (reg == 0 || reg == NOREGION) && continue 64 | lon = lons[r] 65 | geocenters[reg,:] += [lat, lon] 66 | popcenters[reg,:] += popdens[r,c] .* [lat, lon] 67 | counts[reg] += 1 68 | popdenssum[reg] += popdens[r,c] 69 | end 70 | end 71 | geocenters ./= counts 72 | popcenters ./= popdenssum 73 | return geocenters, popcenters # column order (lat,lon) 74 | end 75 | 76 | # find a suitable spot to put the region name label on the map 77 | function find_landarea_near_popcenter(regions, numreg, popcenters, lonrange, latrange, res) 78 | lons = (-180+res/2:res:180-res/2)[lonrange] # longitude values (pixel center) 79 | lats = (90-res/2:-res:-90+res/2)[latrange] # latitude values (pixel center) 80 | rows, cols = size(regions) 81 | filterdistance = max(rows, cols)*res # about 50 km for a Europe map 82 | km_per_degree = π*2*6371/360 83 | disk = diskfilterkernel(filterdistance/km_per_degree/res) 84 | mostlyland = imfilter((regions.>0) .& (regions.!=NOREGION), disk) # 0-1 85 | landcenters = zeros(numreg, 2) 86 | landfactors = fill(Inf, numreg) 87 | weight = 0.9 88 | for c = 1:cols 89 | lat = lats[c] 90 | for r = 1:rows 91 | reg = regions[r,c] 92 | (reg == 0 || reg == NOREGION) && continue 93 | lon = lons[r] 94 | distpop = greatcircledistance((lat,lon), Tuple(popcenters[reg,:])) 95 | closelandfactor = (1 - weight)*distpop/10000 + weight*(1 - mostlyland[r,c]) 96 | if closelandfactor < landfactors[reg] 97 | landfactors[reg] = closelandfactor 98 | landcenters[reg,:] .= lat, lon 99 | end 100 | end 101 | end 102 | return landcenters 103 | end 104 | 105 | function connectedregions(regions, numreg) 106 | connected = zeros(Bool, numreg, numreg) 107 | rows, cols = size(regions) 108 | for c = 1:cols 109 | for r = 1:rows 110 | reg = regions[r,c] 111 | (reg == 0 || reg == NOREGION) && continue 112 | for adjc = max(1, c-1):min(cols, c+1), adjr = max(1, r-1):min(rows, r+1) 113 | reg2 = regions[adjr,adjc] 114 | (reg2 == 0 || reg2 == NOREGION) && continue 115 | connected[reg,reg2] = true 116 | end 117 | end 118 | end 119 | for c = 1:numreg, r = 1:numreg 120 | connected[r,c] = connected[r,c] || connected[c,r] 121 | end 122 | return connected 123 | end 124 | -------------------------------------------------------------------------------- /src/rasterize_shapefiles.jl: -------------------------------------------------------------------------------- 1 | import GDAL 2 | using ArchGDAL, GDAL_jll, PROJ_jll 3 | 4 | export rasterize, readraster, saveTIFF 5 | 6 | function rasterize_AG(infile::String, outfile::String, options::Vector{<:AbstractString}) 7 | ArchGDAL.read(infile) do dataset 8 | GDAL.close(GDAL.rasterize( 9 | outfile, 10 | Ptr{GDAL.GDALDatasetH}(C_NULL), 11 | dataset.ptr, 12 | GDAL.rasterizeoptionsnew(options, C_NULL), C_NULL)) 13 | end 14 | end 15 | 16 | # works (creates the TIFF and saves it) but then crashes 17 | function rasterize_AG2(infile::String, outfile::String, options::Vector{<:AbstractString}) 18 | ArchGDAL.read(infile) do dataset 19 | ArchGDAL.unsafe_gdalrasterize(dataset, options, dest=outfile) 20 | end 21 | end 22 | 23 | # uses the command line version instead (gdal_rasterize) 24 | # significantly faster for some reason, also gives a simple progress indication 25 | function rasterize(infile::String, outfile::String, options::Vector{<:AbstractString}; sql::String="") 26 | gdal_rasterize_path() do gdal_rasterize 27 | if isempty(sql) 28 | run(`$gdal_rasterize $options $infile $outfile`) 29 | else 30 | run(`$gdal_rasterize $options -sql $sql $infile $outfile`) 31 | end 32 | end 33 | end 34 | 35 | function getextent(geotransform::Vector{Float64}, rastersize::Tuple{Int,Int}) 36 | @assert length(geotransform) == 6 "A GeoTransform vector must have 6 elements." 37 | left, xres, _, top, _, yres = geotransform 38 | width, height = rastersize 39 | bottom, right = top+yres*height, left+xres*width 40 | return [left, bottom, right, top] 41 | end 42 | 43 | function read3draster(infile::String) 44 | ArchGDAL.read(infile) do dataset 45 | ArchGDAL.read(dataset) 46 | end 47 | end 48 | 49 | function readraster(infile::String, extentflag::Symbol, dim::Int=1) 50 | local raster, geotransform 51 | raster = ArchGDAL.read(infile) do dataset 52 | # display(ArchGDAL.getproj(dataset)) 53 | geotransform = ArchGDAL.getgeotransform(dataset) 54 | ArchGDAL.read(dataset)[:,:,dim] 55 | end 56 | coordextent = getextent(geotransform, size(raster)) 57 | if extentflag == :extend_to_full_globe 58 | left, bottom, right, top = coordextent 59 | xres, yres = geotransform[2], geotransform[6] 60 | newwidth, newheight = round.(Int, (360/xres, -180/yres)) 61 | xindexes = 1+round(Int, (left-(-180))/xres):newwidth-round(Int, (180-right)/xres) 62 | yindexes = 1+round(Int, (top-90)/yres):newheight+round(Int, (bottom-(-90))/yres) 63 | adjusted = zeros(eltype(raster), (newwidth, newheight)) 64 | adjusted[xindexes, yindexes] = raster 65 | return adjusted, coordextent 66 | else # extentflag == :getextent 67 | return raster, coordextent 68 | end 69 | end 70 | 71 | readraster(infile::String, dim::Int=1) = readraster(infile, :none, dim)[1] 72 | 73 | function saveTIFF(x::AbstractArray, filename::String, extent::Vector{Float64}; nodata=Inf32, compressmethod="LZW") 74 | # EPSG:4326 (switch to importEPSG later) 75 | # http://yeesian.com/ArchGDAL.jl/latest/projections/#Creating-Spatial-References-1 76 | wkt_string = "GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433],AUTHORITY[\"EPSG\",\"4326\"]]" 77 | width, height = size(x) 78 | xres = (extent[3]-extent[1])/width 79 | yres = (extent[4]-extent[2])/height 80 | nbands = size(x, 3) 81 | ArchGDAL.create(filename; 82 | driver = ArchGDAL.getdriver("GTiff"), 83 | width, 84 | height, 85 | nbands, 86 | dtype = eltype(x), 87 | options = ["BIGTIFF=YES", "COMPRESS=$compressmethod"] 88 | ) do dataset 89 | ## assign the projection and transformation parameters 90 | ArchGDAL.setgeotransform!(dataset, [extent[1], xres, 0, extent[4], 0, -yres]) 91 | ArchGDAL.setproj!(dataset, wkt_string) 92 | for b = 1:nbands 93 | band = ArchGDAL.getband(dataset, b) 94 | ArchGDAL.setnodatavalue!(band, nodata) 95 | ArchGDAL.write!(band, x[:,:,b]) 96 | end 97 | end 98 | nothing 99 | end 100 | 101 | saveTIFF(x::AbstractMatrix, filename::String) = saveTIFF(x, filename, [-180.0, -90.0, 180.0, 90.0]) 102 | 103 | # ArchGDAL tutorial: http://www.acgeospatial.co.uk/julia-prt3/ 104 | 105 | # ogrinfo -al -so C:/Stuff/Datasets/gadm36/gadm36.shp 106 | 107 | # rasterize_AG("C:/Stuff/Datasets/gadm36/gadm36.shp", "testtest.tif", "-a ID_0 -ts 4000 2000 -ot Byte") 108 | # shapefile2tif("C:/Stuff/Datasets/gadm36/gadm36.shp", "Europe", "ID_0", 4300, [-11, 34, 32, 72], ') 109 | 110 | # ogr2ogr -f CSV C:/Stuff/Julia/gadmfields012.csv -sql "select uid,id_0,name_0,id_1,name_1,id_2,name_2 from gadm36" C:/Stuff/Datasets/gadm36/gadm36.shp 111 | # gdal_rasterize -a UID -ot Int32 -ts 5000 2500 C:\Stuff\Datasets\gadm36\gadm36.shp C:/Stuff/Julia/globtest.tif 112 | # gdal_rasterize -a UID -ot Int32 -ts 36000 18000 -co COMPRESS=LZW C:/Stuff/Datasets/gadm36/gadm36.shp C:/Users/niclas/Downloads/globtest.tif 113 | # gdal_rasterize -a UID -ot Int32 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW C:/Stuff/Datasets/gadm36/gadm36.shp C:/Users/niclas/Downloads/globtest.tif 114 | 115 | # run(`gdal_rasterize -a UID -ot Int32 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW C:/Stuff/Datasets/gadm36/gadm36.shp C:/Users/niclas/Downloads/globtest.tif`) 116 | # rasterize_AG("C:/Stuff/Datasets/gadm36/gadm36.shp", "C:/Users/niclas/Downloads/globtest3.tif", split("-a ID_0 -ot Byte -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW")) 117 | # rasterize("C:/Stuff/Datasets/gadm36/gadm36.shp", "C:/Users/niclas/Downloads/globtest3.tif", split("-a ID_0 -ot Byte -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW")) 118 | 119 | # timemem-1.0 gdal_translate -r mode -tr 0.1 0.1 -co COMPRESS=LZW gadm.tif gadmsmall.tif 120 | -------------------------------------------------------------------------------- /src/syntheticdemand_training.jl: -------------------------------------------------------------------------------- 1 | using XGBoost, Printf 2 | 3 | export predictdemand, trainmodel, crossvalidate, defaultvariables, getcvdata, plotcv 4 | 5 | const defaultvariables = [:localhour, :weekend01, :temp_monthly, :ranked_month, :temp_top3, 6 | :temp1_mean, :temp1_qlow, :temp1_qhigh, :demandpercapita, :gdppercapita] 7 | 8 | function predictdemand(; variables=defaultvariables, gisregion="Europe8", 9 | sspscenario="ssp2-34", sspyear=2050, era_year=2018, numcenters=3, mindist=3.3, 10 | nrounds=100, max_depth=7, eta=0.05, subsample=0.75, metrics=["mae"], more_xgoptions...) 11 | df, offsets, pop = buildtrainingdata(; gisregion=gisregion, sspscenario=sspscenario, 12 | sspyear=sspyear, era_year=era_year, numcenters=numcenters, mindist=mindist) 13 | regionlist = unique(df[:, :country]) 14 | numhours = 24*daysinyear(era_year) 15 | demandpercapita = df[1:numhours:end, :demandpercapita] # MWh/year/capita 16 | select!(df, variables) 17 | traindata = Matrix(df) 18 | model = trainmodel(; nrounds=nrounds, max_depth=max_depth, eta=eta, subsample=subsample, metrics=metrics, more_xgoptions...) 19 | normdemand = XGBoost.predict(model, traindata) # mean(normdemand) == 1 20 | numreg = length(regionlist) 21 | demand = reshape(normdemand, (numhours, numreg)) .* (demandpercapita/8760)' .* pop' # MW 22 | println("\nConverting synthetic demand to UTC...") 23 | for r = 1:numreg 24 | demand[:,r] = circshift(demand[:,r], round(Int, -offsets[r])) 25 | end 26 | println("\nSaving...") 27 | JLD.save(in_datafolder("output", 28 | "SyntheticDemand_$(gisregion)_$sspscenario-$(sspyear)_$era_year.jld"), 29 | "demand", demand, compress=true) 30 | nothing 31 | end 32 | 33 | function trainmodel(; variables=defaultvariables, nrounds=100, xgoptions...) 34 | df_train, offsets = loadtrainingdata() 35 | println("\nTraining model...") 36 | select!(df_train, variables) 37 | traindata = Matrix(df_train) 38 | normdemand = loaddemanddata()[:, :normdemand] 39 | 40 | model = xgboost(traindata, nrounds; label=normdemand, xgoptions...) 41 | end 42 | 43 | function crossvalidate(; variables=defaultvariables, nrounds=100, max_depth=7, eta=0.05, subsample=0.75, metrics=["mae"], more_xgoptions...) 44 | df_train, offsets = loadtrainingdata() 45 | regionlist = unique(df_train[:, :country]) 46 | select!(df_train, variables) 47 | traindata = Matrix(df_train) 48 | normdemand = loaddemanddata()[:, :normdemand] 49 | 50 | numreg = length(regionlist) 51 | params = Any["max_depth"=>round(Int, max_depth), "eta"=>eta, "subsample"=>subsample, "metrics"=>metrics, more_xgoptions...] 52 | 53 | models = nfold_cv_return(traindata, nrounds, numreg; label=normdemand, metrics=metrics, param=params) # "rmse" or "mae" 54 | display(importance(models[1], string.(variables))) 55 | 56 | return models 57 | end 58 | 59 | function getcvdata(models; variables=defaultvariables, era_year=2018) 60 | df_train, offsets = loadtrainingdata() 61 | regionlist = unique(df_train[:, :country]) 62 | numhours = 24*daysinyear(era_year) 63 | numreg = length(models) 64 | demandpercapita = df_train[1:numhours:end, :demandpercapita] # MWh/year/capita 65 | select!(df_train, variables) 66 | traindata = Matrix(df_train) 67 | normdemand = loaddemanddata()[:, :normdemand] 68 | normdemand_predicted = similar(normdemand) 69 | for reg = 1:numreg 70 | rows = numhours*(reg-1) + 1 : numhours*reg 71 | normdemand_predicted[rows] = XGBoost.predict(models[reg], traindata[rows,:]) # mean(normdemand) == 1 72 | end 73 | return reshape(normdemand, (numhours,numreg)), reshape(normdemand_predicted, (numhours,numreg)), regionlist 74 | end 75 | 76 | 77 | 78 | # Same as XGBoost.nfold_cv() but deterministic: split the training data into nfold equal parts using sequential indices. 79 | # Also returns the models used in the last iteration. 80 | function nfold_cv_return(data, num_boost_round::Integer = 10, nfold::Integer = 3; label = Union{}, 81 | param=[], metrics=[], obj = Union{}, feval = Union{}, fpreproc = Union{}, 82 | show_stdv = true, seed::Integer = 0, kwargs...) 83 | dtrain = XGBoost.makeDMatrix(data, label) 84 | results = String[] 85 | cvfolds = mknfold_deterministic(dtrain, nfold, param, metrics, fpreproc=fpreproc, kwargs = kwargs) 86 | for i in 1:num_boost_round 87 | for f in cvfolds 88 | XGBoost.update(f.bst, 1, f.dtrain, obj = obj) 89 | end 90 | res = XGBoost.aggcv([XGBoost.eval_set(f.bst, f.watchlist, i, feval = feval) for f in cvfolds], 91 | show_stdv = show_stdv) 92 | push!(results, res) 93 | @printf(stderr, "%s\n", res) 94 | end 95 | models = [f.bst for f in cvfolds] 96 | return models 97 | end 98 | 99 | # Same as XGBoost.mknfold() but deterministic: split the training data into nfold equal parts using sequential indices 100 | function mknfold_deterministic(dall::DMatrix, nfold::Integer, param, evals=[]; fpreproc = Union{}, 101 | kwargs = []) 102 | idx = collect(1:XGBoost.XGDMatrixNumRow(dall.handle)) 103 | kstep = size(idx)[1] / nfold 104 | idset = [idx[round(Int64, (i-1) * kstep) + 1 : min(size(idx)[1],round(Int64, i * kstep))] for i in 1:nfold] 105 | ret = XGBoost.CVPack[] 106 | for k in 1:nfold 107 | selected = Int[] 108 | for i in 1:nfold 109 | if k != i 110 | selected = vcat(selected, idset[i]) 111 | end 112 | end 113 | dtrain = XGBoost.slice(dall, selected) 114 | dtest = XGBoost.slice(dall, idset[k]) 115 | if typeof(fpreproc) == Function 116 | dtrain, dtest, tparam = XGBoost.fpreproc(dtrain, dtest, deepcopy(param)) 117 | else 118 | tparam = param 119 | end 120 | plst = vcat([itm for itm in param], [("eval_metric", itm) for itm in evals]) 121 | plst = vcat(plst, [(string(itm[1]), string(itm[2])) for itm in kwargs]) 122 | push!(ret, XGBoost.CVPack(dtrain, dtest, plst)) 123 | end 124 | return ret 125 | end -------------------------------------------------------------------------------- /src/GIStemp.jl: -------------------------------------------------------------------------------- 1 | # tempoptions() = Dict( 2 | # :gisregion => "Europe8", # "Europe8", "Eurasia38", "Scand3" 3 | # 4 | # :scenarioyear => "ssp2_2050", # default scenario and year for population and grid access datasets 5 | # :era_year => 2018, # which year of the ERA5 time series to use 6 | # 7 | # :numcenters => 3 # number of population centers in each region (for average hourly temperature) 8 | # :mindist => 3.3 # minimum distance between population centers [in ERA5 pixels] 9 | # ) 10 | 11 | function GIStemp(gisregion::String, scenarioyear::String, era_year::Int, numcenters::Int, mindist::Float64) 12 | println("\nReading temperature data for $gisregion...") 13 | 14 | regions, offshoreregions, regionlist, lonrange, latrange, pop, meantemp, temp, eralonranges, eralatrange = 15 | read_temperature_datasets(gisregion, scenarioyear, era_year) 16 | 17 | erapop = rescale_population_to_ERA5_res(era_year, regions, offshoreregions, regionlist, lonrange, latrange, pop, temp) 18 | tempmask = dropdims(sum(abs.(temp[1000:1100,:,:]), dims=1), dims=1) .> 0 # detect cells with any temp data (coasts?) 19 | 20 | numreg = length(regionlist) 21 | popcenters = [findpopcenters(erapop[i,:,:].*tempmask, numcenters, mindist) for i = 1:numreg] 22 | # if any region lacks enough population centers, just repeat the ones found (only happens for tiny regions, e.g. Malta) 23 | for (i,p) in enumerate(popcenters) 24 | length(p) == 0 && error("No population centers found for region $(regionlist[i]).") 25 | if length(p) < numcenters 26 | popcenters[i] = repeat(p, outer=numcenters)[1:numcenters] 27 | end 28 | end 29 | 30 | eralonrange = vcat(eralonranges...) 31 | erares = 0.28125 32 | eralons = (-180+erares/2:erares:180-erares/2)[eralonrange] # longitude values (pixel center) 33 | eralats = (90-erares/2:-erares:-90+erares/2)[eralatrange] # latitude values (pixel center) 34 | 35 | hours = DateTime(era_year, 1, 1, 0) : Hour(1) : DateTime(era_year, 12, 31, 23) 36 | numhours = length(hours) 37 | numhours != size(temp,1) && error("Inconsistent number of hours.") 38 | 39 | temp_popcenters = [temp[h, popcenters[r][i]...] for h = 1:numhours, r = 1:numreg, i = 1:numcenters] 40 | 41 | return hours, temp_popcenters 42 | end 43 | 44 | function read_temperature_datasets(gisregion, scenarioyear, era_year) 45 | res = 0.01 # resolution of auxiliary datasets [degrees per pixel] 46 | erares = 0.28125 # resolution of ERA5 datasets [degrees per pixel] 47 | 48 | println("\nReading auxiliary datasets...") 49 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(gisregion) 50 | pop = JLD.load(in_datafolder("population_$scenarioyear.jld"), "population")[lonrange,latrange] 51 | 52 | println("Reading ERA5 temperature dataset...") 53 | eralonranges, eralatrange = eraranges(lonrange, latrange, res, erares) 54 | 55 | @time meantemp, temp = h5open(in_datafolder("era5temp$era_year.h5"), "r") do file 56 | if length(eralonranges) == 1 57 | file["meantemp"][eralonranges[1], eralatrange], 58 | file["temp"][:,eralonranges[1], eralatrange] 59 | else 60 | [file["meantemp"][eralonranges[1], eralatrange]; file["meantemp"][eralonranges[2], eralatrange]], 61 | [file["temp"][:, eralonranges[1], eralatrange] file["temp"][:, eralonranges[2], eralatrange]] 62 | end 63 | end 64 | return regions, offshoreregions, regionlist, lonrange, latrange, pop, meantemp, temp, eralonranges, eralatrange 65 | end 66 | 67 | function rescale_population_to_ERA5_res(era_year, regions, offshoreregions, regionlist, lonrange, latrange, pop, temp) 68 | res = 0.01 # resolution of auxiliary datasets [degrees per pixel] 69 | erares = 0.28125 # resolution of ERA5 datasets [degrees per pixel] 70 | 71 | println("Calculating population centers at ERA5 resolution...") 72 | eralons, eralats, lonmap, latmap, cellarea = eralonlat(Dict(:res=>res, :erares=>erares), lonrange, latrange) 73 | 74 | numreg = length(regionlist) 75 | yearlength, nlons, nlats = size(temp) 76 | firsttime = DateTime(era_year, 1, 1) 77 | 78 | erapop = zeros(numreg, nlons, nlats) 79 | 80 | # Run times vary wildly depending on geographical area (because of far offshore regions with mostly zero wind speeds). 81 | # To improve the estimated time of completing the progress bar, iterate over latitudes in random order. 82 | Random.seed!(1) 83 | updateprogress = Progress(nlats, 1) 84 | for j in randperm(nlats) 85 | eralat = eralats[j] 86 | colrange = latmap[lat2col(eralat+erares/2, res):lat2col(eralat-erares/2, res)-1] 87 | for i = 1:nlons 88 | eralon = eralons[i] 89 | # get all high resolution row and column indexes within this ERA5 cell 90 | rowrange = lonmap[lon2row(eralon-erares/2, res):lon2row(eralon+erares/2, res)-1] 91 | 92 | for c in colrange, r in rowrange 93 | (c == 0 || r == 0) && continue 94 | reg = regions[r,c] 95 | if reg > 0 && reg != NOREGION 96 | erapop[reg,i,j] += pop[r,c] 97 | end 98 | end 99 | end 100 | next!(updateprogress) 101 | end 102 | 103 | return erapop 104 | end 105 | 106 | # Find the (row,col) coordinates of the n largest population cells which are at least mindist cells apart from each other. 107 | function findpopcenters(pop, n, mindist) 108 | n = min(n, sum(pop .> 0)) # make sure there are enough non-zero cells 109 | p = copy(pop) 110 | coords = CartesianIndex{2}[] 111 | for i = 1:n 112 | val, index = findmax(p) 113 | if val == 0 # if the largest cell has zero pop then we were too ambitious - redo with lower distance 114 | # println("lowering distance") 115 | return findpopcenters(pop, n, mindist/2) 116 | end 117 | push!(coords, index) 118 | fillcircle!(p, Tuple(index), mindist, 0) 119 | end 120 | # display([dist(coords[i], coords[j]) for i = 1:length(coords)-1, j = 2:length(coords) if j > i]) 121 | return Tuple.(coords) 122 | end 123 | 124 | dist(a::CartesianIndex, b::CartesianIndex) = sqrt(sum((Tuple(a) .- Tuple(b)).^2)) 125 | 126 | function fillcircle!(a::AbstractMatrix, center::Tuple{Int,Int}, radius::Real, fillvalue) 127 | nrows, ncols = size(a) 128 | rowrange = max(1, ceil(Int, center[1] - radius)) : min(nrows, floor(Int, center[1] + radius)) 129 | colrange = max(1, ceil(Int, center[2] - radius)) : min(ncols, floor(Int, center[2] + radius)) 130 | for c in colrange, r in rowrange 131 | if (r - center[1])^2 + (c - center[2])^2 <= radius^2 132 | a[r,c] = fillvalue 133 | end 134 | end 135 | end 136 | -------------------------------------------------------------------------------- /src/era5download.jl: -------------------------------------------------------------------------------- 1 | using PyCall, Pkg.TOML 2 | 3 | export era5download, monthlyera5download, saveconfig, download_and_convert_era5 4 | 5 | getconfig(key) = getconfig()[key] 6 | 7 | function getconfig() 8 | configfile = joinpath(homedir(), ".GlobalEnergyGIS_config") 9 | if !isfile(configfile) 10 | error("Configuration file missing, please run saveconfig(datafolder, uid, api_key) first. See GlobalEnergyGIS README.") 11 | end 12 | return TOML.parsefile(configfile) 13 | end 14 | 15 | function saveconfig(datafolder::AbstractString, uid::Int, api_key::AbstractString; agree_terms=false) 16 | !agree_terms && error("You must agree to the terms of use of all datasets to proceed. See GlobalEnergyGIS README.") 17 | downloadsfolder = joinpath(datafolder, "downloads") 18 | mkpath(downloadsfolder) 19 | configfile = joinpath(homedir(), ".GlobalEnergyGIS_config") 20 | open(configfile, "w") do io 21 | d = Dict("datafolder"=>datafolder, "agree_terms"=>agree_terms) 22 | TOML.print(io, d) 23 | println("Configuration file written to $configfile.") 24 | end 25 | cds_id(uid, api_key) 26 | end 27 | 28 | function cds_id(uid::Int, api_key::AbstractString) 29 | filename = joinpath(homedir(), ".cdsapirc") 30 | isfile(filename) && error("$filename already exists, no changes made. Please check its contents manually.") 31 | open(filename, "w") do file 32 | write(file, "url: https://cds.climate.copernicus.eu/api/v2\n") 33 | write(file, "key: $uid:$api_key\n") 34 | println("Copernicus credentials written to $filename.") 35 | end 36 | end 37 | 38 | function download_and_convert_era5(year=2018; datasets=["wind", "solar", "temp"]) 39 | for dataset in datasets 40 | println("\nDownloading ERA5 $dataset data from Copernicus...") 41 | era5download(year; datasets=[dataset]) 42 | println("\nConverting downloaded $dataset data to HDF5 and recompressing...") 43 | if dataset == "solar" 44 | makesolarera5(; year) 45 | elseif dataset == "wind" 46 | makewindera5(; year) 47 | elseif dataset == "temp" 48 | maketempera5(; year) 49 | end 50 | println("\nCleanup: deleting downloaded $dataset data...") 51 | clearvars_era5(; year, datasets=[dataset]) 52 | end 53 | println("\nERA5 datasets $datasets downloaded and converted. Temporary files cleaned up.") 54 | end 55 | 56 | function era5download(year=2018; datasets=["wind", "solar", "temp"]) 57 | mkpath(in_datafolder("downloads")) 58 | count = 0 59 | for dataset in datasets, month = 1:12, monthhalf = 1:2 60 | if dataset == "wind" 61 | vars = ["100m_u_component_of_wind", "100m_v_component_of_wind"] 62 | elseif dataset == "solar" 63 | vars = ["surface_solar_radiation_downwards", "total_sky_direct_solar_radiation_at_surface"] 64 | else 65 | vars = ["2m_temperature"] 66 | end 67 | if monthhalf == 1 68 | firstday, lastday = "01", "15" 69 | else 70 | firstday = "16" 71 | lastday = Dates.daysinmonth(Date("$year-$month")) 72 | end 73 | monthstr = lpad(month,2,'0') 74 | date1, date2 = "$year-$monthstr-$firstday", "$year-$monthstr-$lastday" 75 | outfile = in_datafolder("downloads", "$dataset$year-$monthstr$firstday-$monthstr$lastday.nc") 76 | count += 1 77 | println("\nFile $count of $(24*length(datasets)):") 78 | request_era5_vars(outfile, vars, date1, date2) 79 | end 80 | end 81 | 82 | function monthlyera5download(; datasets=["wind", "solar", "temp"]) 83 | mkpath(in_datafolder("downloads")) 84 | for dataset in datasets 85 | if dataset == "wind" 86 | vars = ["100m_u_component_of_wind", "100m_v_component_of_wind"] 87 | elseif dataset == "solar" 88 | vars = ["surface_solar_radiation_downwards", "total_sky_direct_solar_radiation_at_surface"] 89 | else 90 | vars = ["2m_temperature"] 91 | end 92 | # avoid 2020 to avoid partial-year problems and near real-time ERA5T data 93 | # (which includes an additional dimension to indicate ERA5/ERA5T) 94 | years = 1979:2019 95 | outfile = in_datafolder("downloads", "monthly$(dataset)_$(years[1])-$(years[end]).nc") 96 | request_monthly_era5_vars(outfile, vars, collect(years)) 97 | end 98 | end 99 | 100 | function request_era5_vars(outfile::String, vars::Vector{String}, firstdate::String, lastdate::String) 101 | datestring = "$firstdate/$lastdate" 102 | py""" 103 | import cdsapi 104 | 105 | c = cdsapi.Client() 106 | c.retrieve( 107 | 'reanalysis-era5-single-levels', 108 | { 109 | 'product_type': 'reanalysis', 110 | 'format': 'netcdf', 111 | 'variable': $vars, 112 | 'grid': '0.28125/0.28125', 113 | 'area': '89.859375/-179.859375/-89.859375/179.859375', 114 | 'date': $datestring, 115 | 'time': '00/to/23/by/1' 116 | }, 117 | $outfile) 118 | """ 119 | end 120 | 121 | function request_monthly_era5_vars(outfile::String, vars::Vector{String}, years::Vector{Int}) 122 | stryears = string.(years) 123 | months = string.(1:12) 124 | py""" 125 | import cdsapi 126 | 127 | c = cdsapi.Client() 128 | c.retrieve( 129 | 'reanalysis-era5-single-levels-monthly-means', 130 | { 131 | 'product_type': 'monthly_averaged_reanalysis', 132 | 'format': 'netcdf', 133 | 'variable': $vars, 134 | 'grid': '0.28125/0.28125', 135 | 'area': '89.859375/-179.859375/-89.859375/179.859375', 136 | 'year': $stryears, 137 | 'month': $months, 138 | 'time': '00:00' 139 | }, 140 | $outfile) 141 | """ 142 | end 143 | 144 | # For some reason the delivered NetCDF files are unreadable unless they are limited to 15-16 days each. 145 | 146 | # default resolution: 'grid': '0.25/0.25' (but the max ERA5 resolution is 0.28125 degrees, so use that instead) 147 | # default area: 'area': '90/0/-90/179.75' (note max latitude is 90) 148 | 149 | # The default grid size using a resolution of 0.25 is 721x1440 pixels. 150 | # The default grid size using a resolution of 0.28125 is 641x1280 pixels. 151 | 152 | # 'area': '89.859375/-179.859375/-89.859375/179.859375': get correct center points for a 640x1280 grid (north/west/south/east) 153 | 154 | # The CDS server doesn't use high enough precision to evaluate the area coordinates exactly. Each lon & lat is off 155 | # by about 0.000625 degrees, but this error is very small compared to the resolution (0.28125 degrees). Otherwise 156 | # using the default grid we would have to interpolate to get correct center points, and this would also lose some 157 | # precision. This way we avoid doing all that computational work for a 641x1280x8760 grid. 158 | 159 | # Note that using the default grid with a resolution of 0.28125 also makes longitude errors of up to 0.000244 degrees 160 | # (the numbers come from inspecting latitude & longitude coordinates of ERA5 test data downloads). 161 | 162 | # https://confluence.ecmwf.int/display/CKB/How+to+install+and+use+CDS+API+on+Windows 163 | # https://cds.climate.copernicus.eu/api-how-to#install-the-cds-api-key 164 | # https://confluence.ecmwf.int/display/CKB/How+to+download+ERA5 165 | # https://confluence.ecmwf.int/display/CKB/ERA5+data+documentation 166 | # https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-single-levels?tab=form 167 | # https://cds.climate.copernicus.eu/cdsapp#!/yourrequests -------------------------------------------------------------------------------- /src/sspregiondefinitions.jl: -------------------------------------------------------------------------------- 1 | # Aggregation on the five region level 2 | 3 | const ssp5 = Dict( 4 | # R5.2OECD = Includes the OECD 90 and EU member states and candidates. 5 | "OECD" => [ 6 | "Albania", "Australia", "Austria", "Belgium", "Bosnia and Herzegovina", "Bulgaria", "Canada", 7 | "Croatia", "Cyprus", "Czech Republic", "Denmark", "Estonia", "Finland", "France", "Germany", 8 | "Greece", "Guam", "Hungary", "Iceland", "Ireland", "Italy", "Japan", "Latvia", "Lithuania", 9 | "Luxembourg", "Malta", "Montenegro", "Netherlands", "New Zealand", "Norway", "Poland", 10 | "Portugal", "Puerto Rico", "Romania", "Serbia", "Slovakia", "Slovenia", "Spain", "Sweden", 11 | "Switzerland", "Macedonia", "Turkey", "United Kingdom", "United States" 12 | ], 13 | 14 | # R5.2REF = Countries from the Reforming Economies of Eastern Europe and the Former Soviet Union. 15 | "REF" => [ 16 | "Armenia", "Azerbaijan", "Belarus", "Georgia", "Kazakhstan", "Kyrgyzstan", "Moldova", 17 | "Russia", "Tajikistan", "Turkmenistan", "Ukraine", "Uzbekistan" 18 | ], 19 | 20 | # R5.2ASIA = The region includes most Asian countries with the exception of the Middle East, Japan 21 | # and Former Soviet Union states. China includes Hong Kong and Macao, excludes Taiwan. 22 | "ASIA" => [ 23 | "Afghanistan", "Bangladesh", "Bhutan", "Brunei", "Cambodia", "China", "North Korea", "Fiji", 24 | "French Polynesia", "India", "Indonesia", "Laos", "Malaysia", "Maldives", "Micronesia", 25 | "Mongolia", "Myanmar", "Nepal", "New Caledonia", "Pakistan", "Papua New Guinea", "Philippines", 26 | "South Korea", "Samoa", "Singapore", "Solomon Islands", "Sri Lanka", "Taiwan", "Thailand", 27 | "Timor-Leste", "Vanuatu", "Vietnam" 28 | ], 29 | 30 | # R5.2MAF = This region includes the countries of the Middle East and Africa. 31 | "MAF" => [ 32 | "Algeria", "Angola", "Bahrain", "Benin", "Botswana", "Burkina Faso", "Burundi", "Cameroon", 33 | "Cape Verde", "Central African Republic", "Chad", "Comoros", "Republic of Congo", "Côte d'Ivoire", 34 | "Democratic Republic of the Congo", "Djibouti", "Egypt", "Equatorial Guinea", "Eritrea", "Ethiopia", 35 | "Gabon", "Gambia", "Ghana", "Guinea", "Guinea-Bissau", "Iran", "Iraq", "Israel", "Jordan", "Kenya", 36 | "Kuwait", "Lebanon", "Lesotho", "Liberia", "Libya", "Madagascar", "Malawi", "Mali", "Mauritania", 37 | "Mauritius", "Mayotte", "Morocco", "Mozambique", "Namibia", "Niger", "Nigeria", "Palestina", "Oman", 38 | "Qatar", "Rwanda", "Reunion", "Saudi Arabia", "Senegal", "Sierra Leone", "Somalia", "South Africa", 39 | "South Sudan", "Sudan", "Swaziland", "Syria", "Togo", "Tunisia", "Uganda", "United Arab Emirates", 40 | "Tanzania", "Western Sahara", "Yemen", "Zambia", "Zimbabwe" 41 | ], 42 | 43 | # R5.2LAM = This region includes the countries of Latin America and the Caribbean. 44 | "LAM" => [ 45 | "Argentina", "Aruba", "Bahamas", "Barbados", "Belize", "Bolivia", "Brazil", "Chile", "Colombia", 46 | "Costa Rica", "Cuba", "Dominican Republic", "Ecuador", "El Salvador", "French Guiana", "Grenada", 47 | "Guadeloupe", "Guatemala", "Guyana", "Haiti", "Honduras", "Jamaica", "Martinique", "Mexico", 48 | "Nicaragua", "Panama", "Paraguay", "Peru", "Suriname", "Trinidad and Tobago", 49 | "Virgin Islands, U.S.", "Uruguay", "Venezuela" 50 | ] 51 | ) 52 | 53 | 54 | 55 | # Aggregation on the 32 region level 56 | 57 | const ssp32 = Dict( 58 | "ANUZ" => ["Australia", "New Zealand"], 59 | "BRA" => ["Brazil"], 60 | "CAN" => ["Canada"], 61 | # Central Asia 62 | "CAS" => ["Armenia", "Azerbaijan", "Georgia", "Kazakhstan", "Kyrgyzstan", "Tajikistan", 63 | "Turkmenistan", "Uzbekistan"], 64 | # China (Mainland, Hongkong, Macao; excl. Taiwan) 65 | "CHN" => ["China", "Hong Kong", "Macao"], 66 | # Eastern Europe (excl. former Soviet Union and EU member states) 67 | "EEU" => ["Albania", "Bosnia and Herzegovina", "Croatia", "Montenegro", "Serbia", "Macedonia"], 68 | # Eastern Europe, former Soviet Union (excl. Russia and EU members) 69 | "EEU-FSU" => ["Belarus", "Moldova", "Ukraine"], 70 | "EFTA" => ["Iceland", "Norway", "Switzerland"], 71 | # New EU member states that joined as of 2004 - high income. 72 | "EU12-H" => ["Cyprus", "Czech Republic", "Estonia", "Hungary", "Malta", "Poland", "Slovakia", "Slovenia"], 73 | # New EU member states that joined as of 2004 - medium income. 74 | "EU12-M" => ["Bulgaria", "Latvia", "Lithuania", "Romania"], 75 | # This region includes European Union member states that joined prior to 2004. 76 | "EU15" => ["Austria", "Belgium", "Denmark", "Finland", "France", "Germany", "Greece", "Ireland", "Italy", 77 | "Luxembourg", "Netherlands", "Portugal", "Spain", "Sweden", "United Kingdom"], 78 | "IDN" => ["Indonesia"], 79 | "IND" => ["India"], 80 | "JPN" => ["Japan"], 81 | "KOR" => ["South Korea"], 82 | # This region includes the countries of Latin America (excl. Brazil, Mexico) - low income. 83 | "LAM-L" => ["Belize", "Guatemala", "Haiti", "Honduras", "Nicaragua"], 84 | # This region includes the countries of Latin America (excl. Brazil, Mexico) - medium and high income. 85 | # [Netherlands Antilles not in GADM (new status), replaced with islands listed after Venezuela] 86 | "LAM-M" => ["Antigua and Barbuda", "Argentina", "Bahamas", "Barbados", "Bermuda", "Bolivia", "Chile", 87 | "Colombia", "Costa Rica", "Cuba", "Dominica", "Dominican Republic", "Ecuador", "El Salvador", 88 | "French Guiana", "Grenada", "Guadeloupe", "Guyana", "Jamaica", "Martinique", 89 | "Panama", "Paraguay", "Peru", "Saint Kitts and Nevis", "Saint Lucia", 90 | "Saint Vincent and the Grenadines", "Suriname", "Trinidad and Tobago", "Uruguay", "Venezuela", 91 | "Aruba", "Bonaire, Sint Eustatius and Saba", "Sint Maarten", "Curaçao"], 92 | # This region includes the countries of Middle East Asia - high income. 93 | "MEA-H" => ["Bahrain", "Israel", "Kuwait", "Oman", "Qatar", "Saudi Arabia", "United Arab Emirates"], 94 | # This region includes the countries of Middle East Asia - low and medium income. 95 | "MEA-M" => ["Iran", "Iraq", "Jordan", "Lebanon", "Palestina", "Syria", "Yemen"], 96 | "MEX" => ["Mexico"], 97 | # This region includes the countries of North Africa. 98 | "NAF" => ["Algeria", "Egypt", "Libya", "Morocco", "Tunisia", "Western Sahara"], 99 | # This region includes the countries of Other Asia - former Centrally Planned Asia. 100 | "OAS-CPA" => ["Cambodia", "Laos", "Mongolia", "Vietnam"], 101 | # This region includes the countries of Other Asia - low income. 102 | "OAS-L" => ["Bangladesh", "North Korea", "Fiji", "Micronesia", "Myanmar", "Nepal", "Papua New Guinea", 103 | "Philippines", "Samoa", "Solomon Islands", "Timor-Leste", "Tonga", "Vanuatu"], 104 | # This region includes the countries of Other Asia - medium and high income. 105 | "OAS-M" => ["Bhutan", "Brunei", "French Polynesia", "Guam", "Malaysia", "Maldives", "New Caledonia", 106 | "Singapore", "Sri Lanka", "Thailand"], 107 | "PAK" => ["Pakistan", "Afghanistan"], 108 | "RUS" => ["Russia"], 109 | "SAF" => ["South Africa"], 110 | # This region includes the countries of Subsahara Africa (excl. South Africa) - low income. 111 | "SSA-L" => ["Benin", "Burkina Faso", "Burundi", "Cameroon", "Cape Verde", "Central African Republic", 112 | "Chad", "Comoros", "Republic of Congo", "Côte d'Ivoire", "Democratic Republic of the Congo", 113 | "Djibouti", "Eritrea", "Ethiopia", "Gambia", "Ghana", "Guinea", "Guinea-Bissau", "Kenya", 114 | "Lesotho", "Liberia", "Madagascar", "Malawi", "Mali", "Mauritania", "Mozambique", "Niger", 115 | "Nigeria", "Rwanda", "São Tomé and Príncipe", "Senegal", "Sierra Leone", "Somalia", 116 | "South Sudan", "Sudan", "Swaziland", "Togo", "Uganda", "Tanzania", "Zambia", "Zimbabwe"], 117 | # This region includes the countries of Subsahara Africa (excl. South Africa) - medium and high income. 118 | "SSA-M" => ["Angola", "Botswana", "Equatorial Guinea", "Gabon", "Mauritius", "Mayotte", "Namibia", 119 | "Reunion", "Seychelles"], 120 | "TUR" => ["Turkey"], 121 | "TWN" => ["Taiwan"], 122 | "USA" => ["United States", "Puerto Rico", "Virgin Islands, U.S."] 123 | ) -------------------------------------------------------------------------------- /src/downloaddatasets.jl: -------------------------------------------------------------------------------- 1 | using DataDeps, UrlDownload 2 | 3 | export download_datasets 4 | 5 | # tuples of (dataset_name, filename, url) 6 | # not a const to avoid errors when updating urls 7 | function get_dataset_info() 8 | [ 9 | # https://globalwindatlas.info/api/gis/global/wind-speed/10 10 | # https://globalwindatlas.info/api/gis/global/wind-speed/50 11 | # https://globalwindatlas.info/api/gis/global/wind-speed/100 12 | # https://globalwindatlas.info/api/gis/global/wind-speed/150 13 | # https://globalwindatlas.info/api/gis/global/wind-speed/200 14 | "GWA100" ("Global Wind Atlas", "Global Wind Atlas v3 - 100m wind speed.tif", 15 | "https://chalmersuniversity.box.com/shared/static/wfr6dm9bcmj0mcqtdn0uimhg0otd4ht1.tif") 16 | "GWA150" ("Global Wind Atlas", "Global Wind Atlas v3 - 150m wind speed.tif", 17 | "https://chalmersuniversity.box.com/shared/static/ghexnwa7crukl58nkwric8v9kmlymvj2.tif") 18 | "GWA200" ("Global Wind Atlas", "Global Wind Atlas v3 - 200m wind speed.tif", 19 | "https://chalmersuniversity.box.com/shared/static/7ib2jdni6hu9uwe1hp1mqoeeyqtg5601.tif") 20 | "WDPA" ("WDPA (protected areas):", "WDPA.zip", 21 | "https://d1gam3xoknrgr2.cloudfront.net/current/$(filenameWDPA()).zip") 22 | # "https://chalmersuniversity.box.com/shared/static/wn1kznvy7qh1issqcxdlsq64kgtkaayi.zip") 23 | "GADM" ("GADM (global administrative areas)", "gadm36.zip", 24 | "https://biogeo.ucdavis.edu/data/gadm3.6/gadm36_shp.zip") 25 | "NUTS" ("NUTS (administrative areas in Europe)", "nuts-2016-01m.shp.zip", 26 | "https://ec.europa.eu/eurostat/cache/GISCO/distribution/v2/nuts/download/ref-nuts-2016-01m.shp.zip") 27 | "landcover" ("Land Cover", "Landcover - USGS MODIS.tif", 28 | "https://chalmersuniversity.box.com/shared/static/zm8zdkv1mz0wns0u77afkna95wbo7ggo.tif") 29 | "topography" ("ETOPO1 Topography", "ETOPO1_Ice_c_geotiff.zip", 30 | "https://www.ngdc.noaa.gov/mgg/global/relief/ETOPO1/data/ice_surface/cell_registered/georeferenced_tiff/ETOPO1_Ice_c_geotiff.zip") 31 | "ssppop1" ("1-km downscaled spatial population scenarios (Gao et al), file 1/3", "temp_ssp1.tar", 32 | "http://www.cgd.ucar.edu/iam/modeling/data/SSP1_1km_netcdf.tar") 33 | "ssppop2" ("1-km downscaled spatial population scenarios (Gao et al), file 2/3", "temp_ssp2.tar", 34 | "http://www.cgd.ucar.edu/iam/modeling/data/SSP2_1km_netcdf.tar") 35 | "ssppop3" ("1-km downscaled spatial population scenarios (Gao et al), file 3/3", "temp_ssp3.tar", 36 | "http://www.cgd.ucar.edu/iam/modeling/data/SSP3_1km_netcdf.tar") 37 | "gdppop" ("Global population & GDP", "temp_popgdp.zip", 38 | "https://osf.io/7jv3n/download") 39 | "powerplants" ("WRI Global Power Plant Database", "WRI - Global Power Plant Database v1.10.zip", 40 | "https://chalmersuniversity.box.com/shared/static/ss6gycw7hf10e1fiicbxl5rgk08q5xr9.zip") 41 | # switch to the official v1.2 link later (some plant cleanup is hardcoded for v1.1) 42 | # "http://datasets.wri.org/dataset/540dcf46-f287-47ac-985d-269b04bea4c6/resource/c240ed2e-1190-4d7e-b1da-c66b72e08858/download/globalpowerplantdatabasev120") 43 | "timezones" ("Time zone shape file", "timezones-with-oceans.shapefile.zip", 44 | "https://github.com/evansiroky/timezone-boundary-builder/releases/download/2019b/timezones-with-oceans.shapefile.zip") 45 | "monthlywind" ("Average monthly wind speeds 1979-2019", "era5monthlywind.h5", 46 | "https://chalmersuniversity.box.com/shared/static/otvb5nq0lz5ntqx0e65kocos2afo7gas.h5") 47 | "monthlysolar" ("Average monthly solar insolation 1979-2019", "era5monthlysolar.h5", 48 | "https://chalmersuniversity.box.com/shared/static/jlpspmp9ou96hk7xno46rf79wg3d5hqc.h5") 49 | "various" ("Various smaller datasets", "Various_smaller_datasets.zip", 50 | "https://chalmersuniversity.box.com/shared/static/w3pmx4xhgorgd6jejv23gn4ycsnza8s6.zip") 51 | ] 52 | end 53 | 54 | filenameWDPA(monthyear=Dates.format(now(), "uyyyy")) = "WDPA_WDOECM_$(monthyear)_Public_all_shp" 55 | 56 | download_datasets(startfrom::Int) = download_datasets(get_dataset_info()[startfrom:end, 1]...) 57 | download_datasets(datasetindices::AbstractVector) = download_datasets(get_dataset_info()[datasetindices, 1]...) 58 | 59 | function download_datasets(shortnames::String...) 60 | datafolder = getconfig("datafolder") 61 | if !isfile(datafolder) 62 | mkpath(datafolder) 63 | end 64 | 65 | dataset_info = get_dataset_info() 66 | datasets = Dict(r[1] => r[2] for r in eachrow(dataset_info)) 67 | shortnames = isempty(shortnames) ? dataset_info[:, 1] : shortnames 68 | 69 | for (i, shortname) in enumerate(shortnames) 70 | fullname, filename, url = datasets[shortname] 71 | 72 | println("\nDownloading dataset $i: $fullname") 73 | download_progressbar(url, joinpath(datafolder, filename)) 74 | unpack_and_cleanup(shortname, filename, datafolder, dataset_info) 75 | end 76 | println("\nDownloads complete.") 77 | end 78 | 79 | function unpack_and_cleanup(shortname, filename, datafolder, dataset_info) 80 | foldername, extension = splitext(filename) 81 | fullpath = joinpath(datafolder, filename) 82 | 83 | if extension in [".zip", ".tar"] && isfile(fullpath) 84 | println("\nUnpacking archive: $filename") 85 | unpack(fullpath, joinpath(datafolder, foldername), extension) 86 | end 87 | 88 | function renameWDPAfiles(WDPAfolder) 89 | for filename in readdir(WDPAfolder) 90 | newname = replace(filename, filenameWDPA() => "WDPA-shapefile") 91 | if newname != filename 92 | mv(joinpath(WDPAfolder, filename), joinpath(WDPAfolder, newname)) 93 | end 94 | end 95 | end 96 | 97 | if shortname == "WDPA" 98 | renameWDPAfiles(joinpath(datafolder, "WDPA")) 99 | for i = 0:2 100 | foldername = "WDPA-shapefile_$i" 101 | println("\nUnpacking archive: $foldername.zip") 102 | unpack(joinpath(datafolder, "WDPA", "$foldername.zip"), 103 | joinpath(datafolder, "WDPA", foldername), ".zip") 104 | renameWDPAfiles(joinpath(datafolder, "WDPA", foldername)) 105 | end 106 | elseif shortname == "NUTS" 107 | filename = "NUTS_RG_01M_2016_4326_LEVL_3.shp.zip" 108 | println("\nUnpacking archive: $filename") 109 | unpack(joinpath(datafolder, "nuts-2016-01m.shp", filename), joinpath(datafolder, "nuts2016-level3"), ".zip") 110 | rm(joinpath(datafolder, "nuts-2016-01m.shp"), force=true, recursive=true) 111 | elseif shortname == "topography" 112 | mv(joinpath(datafolder, "ETOPO1_Ice_c_geotiff", "ETOPO1_Ice_c_geotiff.tif"), joinpath(datafolder, "ETOPO1_Ice_c_geotiff.tif")) 113 | rm(joinpath(datafolder, "ETOPO1_Ice_c_geotiff")) 114 | elseif shortname[1:end-1] == "ssppop" 115 | sspfolder = joinpath(datafolder, "SSP_1km") 116 | !isdir(sspfolder) && mkdir(sspfolder) 117 | for ssp = 1:3 118 | for y = 2010:10:2100 119 | sspfile = joinpath(datafolder, "temp_ssp$ssp", "SSP$(ssp)_1km", "ssp$(ssp)_total_$y.nc4") 120 | isfile(sspfile) && mv(sspfile, joinpath(sspfolder, "ssp$(ssp)_total_$y.nc4")) 121 | end 122 | rm(joinpath(datafolder, "temp_ssp$ssp"), force=true, recursive=true) 123 | end 124 | elseif shortname == "gdppop" 125 | mv(joinpath(datafolder, "temp_popgdp", "data"), joinpath(datafolder, "global_population_and_gdp"), force=true) 126 | rm(joinpath(datafolder, "temp_popgdp")) 127 | elseif shortname == "powerplants" 128 | mv(joinpath(datafolder, "WRI - Global Power Plant Database v1.10"), joinpath(datafolder, "tempWRI"), force=true) 129 | mv(joinpath(datafolder, "tempWRI", "WRI - Global Power Plant Database v1.10"), 130 | joinpath(datafolder, "WRI - Global Power Plant Database v1.10")) 131 | rm(joinpath(datafolder, "tempWRI"), force=true, recursive=true) 132 | elseif shortname == "various" 133 | mixeddir = joinpath(datafolder, "Various_smaller_datasets") 134 | for file in readdir(mixeddir) 135 | mv(joinpath(mixeddir, file), joinpath(datafolder, file), force=true) 136 | end 137 | rm(mixeddir) 138 | end 139 | 140 | if extension in [".zip", ".tar"] && isfile(fullpath) 141 | rm(fullpath) 142 | end 143 | end 144 | 145 | function download_progressbar(url::AbstractString, filename::AbstractString) 146 | println("Downloading to $filename...") 147 | # UrlDownload can take care of unpacking too, look into this. 148 | urldownload(url, true, compress=:none, parser=identity, save_raw=filename) 149 | end 150 | 151 | function unpack(inputfilename, outputpath, extension) 152 | !isdir(outputpath) && mkdir(outputpath) 153 | run(DataDeps.unpack_cmd(inputfilename, outputpath, extension, "")) 154 | end 155 | -------------------------------------------------------------------------------- /src/solarposition.jl: -------------------------------------------------------------------------------- 1 | export solarposition, zenith, zenith_azimuth, sines_and_cosines 2 | 3 | # Calculate the position of the sun as (zenith,azimuth) in degrees for a location 4 | # on earth given as (latitude,longitude) in degrees. The DateTime should be given 5 | # as Universal Time, e.g. UTC. 6 | function solarposition_wiki(datetime::DateTime, latitude, longitude) 7 | # Position of the Sun as seen from Earth. 8 | # Ported from Matlab, original helpstring below. 9 | 10 | # This is the most basic algorithm. It is documented in Seinfeld & 11 | # Pandis, Duffie & Beckman and Wikipedia. 12 | # 13 | # [ANGLES,PROJECTION] = SOLARPOSITION(DATE,TIME,LATITUDE,LONGITUDE,TIME_ZONE) 14 | # returns ZENITH & AZIMUTH for all DATE & TIME pairs at LATITUDE, LONGITUDE. 15 | # ANGLES = [ZENITH,AZIMUTH] and PROJECTION = [PHI_X, PHI_Y] 16 | # PHI_X is projection on x-z plane & PHI_Y is projection on y-z plane. 17 | # DATETIME can be string, vector [YEAR, MONTH, DAY, HOURS, MINUTES, SECONDS], 18 | # cellstring or matrix N x [YEAR, MONTH, DAY, HOURS, MINUTES, SECONDS] for N 19 | # times. 20 | # LATITUDE [degrees] and LONGITUDE [degrees] are the coordinates of the site. 21 | # TIME_ZONE [hours] of the site. 22 | # ROTATION [degrees] clockwise rotation of system relative to north. 23 | # DST [logical] flag for daylight savings time, typ. from March to November 24 | # in the northern hemisphere. 25 | # 26 | # References: 27 | # http://en.wikipedia.org/wiki/Solar_azimuth_angle 28 | # http://en.wikipedia.org/wiki/Solar_elevation_angle 29 | # 30 | # Mark A. Mikofski 31 | # Copyright (c) 2013 32 | 33 | 34 | 35 | # Equation of Time 36 | # "Alternative calculation" on wikipedia: 37 | # https://en.wikipedia.org/wiki/Equation_of_time#Calculating_the_equation_of_time 38 | # Supposedly accurate to 6 seconds (0.1 minutes) 39 | 40 | year = Dates.year(datetime) 41 | # date in days starting at zero on 1 January (the subtraction produces a difference in milliseconds) 42 | d_ms = datetime - DateTime(year,1,1) 43 | d = d_ms.value/1000/3600/24 44 | 45 | # Earth's mean angular orbital velocity [degrees/day] 46 | w = 360/365.24 47 | # angle the earth moves on its orbit at its average speed from the December solstice 48 | # 10 is the approximate number of days from the December solstice to January 1 49 | a = w * (d+10) 50 | # The angle the Earth moves from the solstice to date D, including a first-order correction 51 | # for the Earth's orbital eccentricity, 0.0167. The number 2 is the number of days from 1 January 52 | # to the date of the Earth's perihelion. 53 | b = a + (360/pi*0.0167)*sind(w*(d-2)) 54 | # C is the difference between the angles moved at mean speed, and at the corrected speed projected 55 | # onto the equatorial plane, and divided by 180 to get the difference in "half turns". The value 23.44° 56 | # is the obliquity (tilt) of the Earth's axis. The subtraction gives the conventional sign to the equation of time. 57 | c = (a - atand(tand(b)/cosd(23.44))) / 180 58 | # For any given value of x, arctan x (sometimes written as tan−1 x) has multiple values, differing from each other 59 | # by integer numbers of half turns. This may cause C to be wrong by an integer number of half turns. The excess 60 | # half turns are removed in the next step of the calculation to give the equation of time: 61 | ET = 720*(c - round(c)) 62 | 63 | # approximate solar time [hours] 64 | solarTime = 24*(d - floor(d)) + longitude*24/360 + ET/60 65 | t_h = 15*(solarTime - 12) # [degrees] hour angle 66 | 67 | # declination [degrees] 68 | # accurate to 0.2 degrees, see https://en.wikipedia.org/wiki/Position_of_the_Sun#Calculations 69 | delta = -asind(sind(23.44)*cosd(b)) 70 | zenith = acosd(sind(latitude)*sind(delta) + cosd(latitude)*cosd(delta)*cosd(t_h)) # [degrees] zenith 71 | 72 | # azimuth [0, 180], absolute value measured from due south, so east = west = 90, 73 | # south = 0, north = 180 74 | cos_phi = (cosd(zenith)*sind(latitude) - sind(delta)) ./ (sind(zenith)*cosd(latitude)) # cosine(azimuth) 75 | phi_south = acosd(clamp(cos_phi,-1,1)) 76 | #phi_south = acosd(cos_phi); 77 | 78 | # azimuth [0, 360], measured clockwise from due north, 79 | # so east = 90, south = 180, and west = 270 degrees 80 | azimuth = 180 + sign(t_h)*phi_south # Shift domain to 0-360 deg 81 | 82 | #angles = [theta, phi]; # [degrees] zenith, azimuth 83 | return zenith, azimuth, solarTime, delta 84 | end 85 | 86 | function solarposition_wiki_faster(datetime::DateTime, latitude, longitude) 87 | year = Dates.year(datetime) 88 | d_ms = datetime - DateTime(year,1,1) 89 | d = d_ms.value/1000/3600/24 90 | w = 360/365.24 91 | a = w * (d+10) 92 | b = a + (360/pi*0.0167)*sind(w*(d-2)) 93 | c = (a - atand(tand(b)/cosd(23.44))) / 180 94 | ET = 720*(c - round(c)) 95 | solarTime = 24*(d - floor(d)) + longitude*24/360 + ET/60 96 | t_h = 15*(solarTime - 12) # [degrees] hour angle 97 | delta = -asind(sind(23.44)*cosd(b)) 98 | slat, clat = sind(latitude), cosd(latitude) 99 | sdel, cdel = sind(delta), cosd(delta) 100 | czen = slat*sdel + clat*cdel*cosd(t_h) 101 | zenith = acosd(czen) # [degrees] zenith 102 | 103 | # azimuth [0, 180], absolute value measured from due south, so east = west = 90, 104 | # south = 0, north = 180 105 | cos_phi = (czen*slat - sdel) / (sind(zenith)*clat) # cosine(azimuth) 106 | phi_south = acosd(clamp(cos_phi,-1,1)) 107 | #phi_south = acosd(cos_phi); 108 | 109 | # azimuth [0, 360], measured clockwise from due north, 110 | # so east = 90, south = 180, and west = 270 degrees 111 | azimuth = 180 + sign(t_h)*phi_south # Shift domain to 0-360 deg 112 | 113 | #angles = [theta, phi]; # [degrees] zenith, azimuth 114 | return zenith, azimuth, solarTime, delta 115 | end 116 | 117 | 118 | 119 | # Splits the DateTime into (year, month, day) (as integers) and the hour (as a floating point) 120 | function splitdatetime(datetime::DateTime) 121 | # date = floor(datetime, Dates.Day) 122 | y, m, d = Dates.yearmonthday(datetime) 123 | return y, m, d, (datetime - DateTime(y,m,d)).value/1000/3600 124 | end 125 | 126 | # time correction term, difference between Terrestrial Time and Universal Time, see Grena (2012) section 2.1. 127 | # Conversion to TT is necessary since UT follows the rotation of the earth, which is not uniform. 128 | # t [days] days from January 1, 2060 (in UT) 129 | # returns Δτ [seconds] 130 | differenceTTUT(t) = 96.4 + 0.00158*t 131 | 132 | # Calculate time variable needed for Grena (2012) algorithms, see section 3.1. 133 | # The time t is the input DateTime expressed as days from January 1, 2060 (in UT). 134 | # returns t [days] 135 | calctime(datetime::DateTime) = (datetime-DateTime(2060,1,1)).value/1000/3600/24 136 | 137 | # Same result as above, but this is the function in Grena's paper. The Julia version is faster. 138 | function calctime_grena(datetime::DateTime) 139 | y, m, d, h = splitdatetime(datetime) 140 | if m <= 2 141 | m += 12 142 | y -= 1 143 | end 144 | return trunc(365.25*(y-2000)) + trunc(30.6001*(m+1)) - trunc(0.01*y) + d + 0.0416667*h - 21958 145 | end 146 | 147 | # Full version of algorithm 1 of Grena (2012), see section 3.2. 148 | # Calculate the position of the sun as (zenith,azimuth) in radians for a location 149 | # on earth given as (latitude,longitude) in degrees. The DateTime should be given 150 | # as Universal Time, e.g. UTC. 151 | function solarposition_grena1(datetime::DateTime, latitude, longitude) 152 | ω = 0.017202786 # [1/day] 153 | t = calctime(datetime) 154 | t_e = t + 1.1574e-5 * differenceTTUT(t) 155 | s1, c1 = sin(ω*t_e), cos(ω*t_e) 156 | s2, c2 = 2*s1*c1, (c1+s1)*(c1-s1) 157 | α = mod(-1.38880 + 1.72027920e-2*t_e + 3.199e-2*s1 - 2.65e-3*c1 + 4.050e-2*s2 + 1.525e-2*c2, 2π) 158 | δ = 6.57e-3 + 7.347e-2*s1 - 3.9919e-1*c1 + 7.3e-4*s2 - 6.60e-3*c2 159 | H = mod(1.75283 + 6.3003881*t + deg2rad(longitude) - α + π, 2π) - π 160 | sϕ, cϕ = sind(latitude), cosd(latitude) 161 | sδ, cδ = sin(δ), cos(δ) 162 | sH, cH = sin(H), cos(H) 163 | se0 = sϕ*sδ + cϕ*cδ*cH 164 | ep = asin(se0) - 4.26e-5*sqrt(1-se0^2) #cos(e0) 165 | azimuth = atan(sH, cH*sϕ - sδ*cϕ/cδ) #azimuth = 0 towards south and positive direction towards west 166 | zenith = π/2 - ep 167 | return zenith, azimuth # radians 168 | # return zenith, azimuth, δ, H # radians 169 | # return rad2deg(zenith), rad2deg(mod(azimuth+π,2π)), rad2deg(H)/15+12, rad2deg(δ) # degrees, same azimuth origin as solarposition0 170 | end 171 | 172 | # First half of algorithm 1 of Grena (2012), see section 3.2. 173 | # Calculate the position of the sun as (declination, hour angle) in radians for a location 174 | # on earth given by its longitude in degrees. The DateTime should be given 175 | # as Universal Time, e.g. UTC. 176 | function solarposition(datetime::DateTime, longitude) 177 | ω = 0.017202786 # [1/day] 178 | t = calctime(datetime) 179 | t_e = t + 1.1574e-5 * differenceTTUT(t) 180 | s1, c1 = sin(ω*t_e), cos(ω*t_e) 181 | s2, c2 = 2*s1*c1, (c1+s1)*(c1-s1) 182 | α = mod(-1.38880 + 1.72027920e-2*t_e + 3.199e-2*s1 - 2.65e-3*c1 + 4.050e-2*s2 + 1.525e-2*c2, 2π) 183 | δ = 6.57e-3 + 7.347e-2*s1 - 3.9919e-1*c1 + 7.3e-4*s2 - 6.60e-3*c2 184 | H = mod(1.75283 + 6.3003881*t + deg2rad(longitude) - α + π, 2π) - π 185 | return δ, H 186 | end 187 | 188 | # Full version of algorithm 1 of Grena (2012), see section 3.2. 189 | # Calculate the position of the sun as (zenith, azimuth) in degrees for a location 190 | # on earth given as (latitude,longitude) in degrees. The DateTime should be given 191 | # as Universal Time, e.g. UTC. 192 | function solarposition(datetime::DateTime, latitude, longitude) 193 | δ, H = solarposition(datetime, longitude) 194 | zenith, azimuth = zenith_azimuth(latitude, sines_and_cosines(δ, H)...) 195 | return rad2deg(zenith), rad2deg(azimuth) 196 | end 197 | 198 | sines_and_cosines(δ, H) = sin(δ), cos(δ), sin(H), cos(H) 199 | 200 | function zenith_azimuth(latitude, sδ, cδ, sH, cH) 201 | sϕ, cϕ = sind(latitude), cosd(latitude) 202 | se0 = sϕ*sδ + cϕ*cδ*cH 203 | azimuth = atan(sH, cH*sϕ - sδ*cϕ/cδ) # azimuth = 0 towards south and positive direction towards west 204 | zenith = π/2 - asin(se0) + 4.26e-5*sqrt(1-se0^2) # sqrt(1-se0^2) = cos(e0), but slightly faster 205 | return zenith, azimuth # radians 206 | end 207 | 208 | function zenith(latitude, sδ, cδ, sH, cH) 209 | sϕ, cϕ = sind(latitude), cosd(latitude) 210 | se0 = sϕ*sδ + cϕ*cδ*cH 211 | return π/2 - asin(se0) + 4.26e-5*sqrt(1-se0^2) # sqrt(1-se0^2) = cos(e0), but slightly faster 212 | end 213 | 214 | const solarconstant = 1361 # W/m2 215 | 216 | # Annual variation of solar insolation outside the atmosphere (W/m2): 217 | # https://www.itacanet.org/the-sun-as-a-source-of-energy/part-2-solar-energy-reaching-the-earths-surface/#2.1.-The-Solar-Constant 218 | solarinsolation(dt::DateTime) = solarconstant * (1 + 0.034*cos(2*π*(dt - DateTime(year(dt))).value/1000/3600/24/365.25)) 219 | 220 | # dt = DateTime(2019,4,15,13,46) 221 | # δ, H = solarposition(dt, 12) # return rad2deg(zenith), rad2deg(mod(azimuth+π,2π)), rad2deg(H)/15+12, rad2deg(δ) 222 | 223 | # pos = sines_and_cosines(δ, H) 224 | # zen = zenith(58, pos...) 225 | 226 | -------------------------------------------------------------------------------- /src/make_regions.jl: -------------------------------------------------------------------------------- 1 | export GADM, NUTS, makeregions, makeregions_nuts, makeoffshoreregions, saveregions, loadregions, 2 | saveregions_global, subregions, all_gadm_subregions 3 | 4 | abstract type RegionType end 5 | 6 | struct GADM{T} <: RegionType 7 | parentregions::Vector{T} 8 | subregionnames::NTuple{N,T} where N 9 | end 10 | GADM(regionnames::T...) where T = GADM(T[], regionnames) 11 | GADM(parentregions::Vector{T}, subregionnames::T...) where T = GADM(parentregions, subregionnames) 12 | 13 | struct NUTS{T} <: RegionType 14 | subregionnames::NTuple{N,T} where N 15 | end 16 | NUTS(regionnames::T...) where T = NUTS(regionnames) 17 | 18 | const NOREGION = typemax(Int16) 19 | 20 | function saveregions(regionname, subregionnames, regions::Matrix{Int32}; autocrop=true, bbox=[-90 -180; 90 180]) 21 | land = JLD.load(in_datafolder("landcover.jld"), "landcover") 22 | saveregions(regionname, subregionnames, regions, :GADM, land, autocrop, bbox) 23 | end 24 | 25 | function saveregions(regionname, regiondefinitionarray; autocrop=true, bbox=[-90 -180; 90 180]) 26 | land = JLD.load(in_datafolder("landcover.jld"), "landcover") 27 | if !all(bbox .== [-90 -180; 90 180]) 28 | autocrop = false # ignore supplied autocrop option if user changed bbox 29 | end 30 | regions, regiontype = makeregions(regiondefinitionarray; allowmixed=(regionname=="Europe_background")) 31 | saveregions(regionname, regiondefinitionarray, regions, regiontype, land, autocrop, bbox) 32 | end 33 | 34 | function saveregions(regionname, regiondefinitionarray, regions, regiontype, landcover, autocrop, bbox) 35 | if autocrop 36 | # get indexes of the bounding box containing onshore region data with 6% of padding 37 | lonrange, latrange = getbboxranges(regions) 38 | padding = round(Int, maximum(size(regions[lonrange,latrange])) * 0.06) 39 | lonrange, latrange = getbboxranges(regions, padding) 40 | else 41 | latrange, lonrange = bbox2ranges(roundbbox(bbox,100), 100) # TO DO: remove hardcoded raster density 42 | end 43 | landcover = landcover[lonrange, latrange] 44 | regions = regions[lonrange, latrange] 45 | 46 | if regionname != "Global_GADM0" && regionname != "Europe_background" 47 | if regiontype == :NUTS 48 | println("\nNUTS region definitions detected (using Europe_background region file)...") 49 | europeregions = loadregions("Europe_background")[1][lonrange, latrange] 50 | regions[(regions.==0) .& (europeregions.>0)] .= NOREGION 51 | elseif regiontype == :GADM 52 | println("\nGADM region definitions detected (using Global_GADM0 region file)...") 53 | globalregions = loadregions("Global_GADM0")[1][lonrange, latrange] 54 | regions[(regions.==0) .& (globalregions.>0)] .= NOREGION 55 | end 56 | end 57 | 58 | # Find the closest region pixel for all non-region pixels (land and ocean) 59 | println("\nAllocate non-region pixels to the nearest region (for offshore wind)...") 60 | territory = regions[feature_transform(regions.>0)] 61 | 62 | # Allocate ocean and lake pixels to the region with the closest land region. 63 | # Even VERY far offshore pixels will be allocated to whatever region is nearest, but 64 | # those areas still won't be available for offshore wind power because of the 65 | # requirement to be close enough to the electricity grid (or rather the grid proxy). 66 | offshoreregions = territory .* (landcover .== 0) 67 | 68 | if regionname != "Global_GADM0" && regionname != "Europe_background" 69 | # Allocate land pixels with region==0 to the closest land region. 70 | # This ensures that the regions dataset is pixel-compatible with the landcover dataset. 71 | regions = territory .* (landcover .> 0) 72 | end 73 | 74 | println("\nSaving regions and offshoreregions...") 75 | regionlist = Symbol.(regiondefinitionarray[:,1]) 76 | 77 | JLD.save(in_datafolder("regions_$regionname.jld"), "regions", regions, "offshoreregions", offshoreregions, 78 | "regionlist", regionlist, "lonrange", lonrange, "latrange", latrange, compress=true) 79 | end 80 | 81 | function saveregions_global(; args...) 82 | println("\nCreating a global GADM region file to identify countries and land areas later...") 83 | g = readdlm(in_datafolder("gadmfields.csv"), ',', skipstart=1) 84 | gadm0 = unique(string.(g[:,2])) 85 | regiondefinitionarray = [gadm0 GADM.(gadm0)] 86 | saveregions("Global_GADM0", regiondefinitionarray; args..., autocrop=false) 87 | println("Global GADM region file saved.") 88 | 89 | println("\nCreating a 'background' NUTS region file to identify non-European land areas later...") 90 | regiondefinitionarray = [NUTS_Europe; non_NUTS_Europe] 91 | saveregions("Europe_background", regiondefinitionarray; args..., autocrop=false) 92 | println("\nEurope_background region file saved.") 93 | 94 | println("\nCreating a region file for the 44 countries with synthetic demand training data...") 95 | regiondefinitionarray = [syntheticdemandregions GADM.(syntheticdemandregions)] 96 | saveregions("SyntheticDemandRegions", regiondefinitionarray; args...) 97 | println("\nSyntheticDemandRegions file saved.") 98 | end 99 | 100 | function loadregions(regionname) 101 | jldopen(in_datafolder("regions_$regionname.jld"), "r") do file 102 | return read(file, "regions"), read(file, "offshoreregions"), read(file, "regionlist"), 103 | read(file, "lonrange"), read(file, "latrange") 104 | end 105 | end 106 | 107 | function makeregions(regiondefinitionarray; allowmixed=false) 108 | regionnames, nutsdef, gadmdef = splitregiondefinitions(regiondefinitionarray) 109 | use_nuts, use_gadm = !all(isempty.(nutsdef)), !all(isempty.(gadmdef)) 110 | regiontype = (use_gadm && !use_nuts) ? :GADM : 111 | (use_nuts && !use_gadm) ? :NUTS : 112 | (use_nuts && use_gadm) ? :MIXED : :WEIRD 113 | !allowmixed && regiontype==:MIXED && error("Sorry, mixed NUTS & GADM definitions are not supported yet.") 114 | region = zeros(Int16, (36000,18000)) # hard code size for now 115 | if use_nuts 116 | nuts, subregionnames = read_nuts() 117 | makeregions_nuts!(region, nuts, subregionnames, nutsdef) 118 | end 119 | if use_gadm 120 | gadm, subregionnames = read_gadm() 121 | makeregions_gadm!(region, gadm, subregionnames, gadmdef) 122 | end 123 | return region, regiontype 124 | end 125 | 126 | function regions2matlab(gisregion) 127 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(gisregion) 128 | matopen(in_datafolder("regions_$gisregion.mat"), "w", compress=true) do file 129 | write(file, "regions", regions) 130 | write(file, "offshoreregions", offshoreregions) 131 | write(file, "regionlist", string.(regionlist)) 132 | write(file, "lonrange", collect(lonrange)) 133 | write(file, "latrange", collect(latrange)) 134 | end 135 | end 136 | 137 | function splitregiondefinitions(regiondefinitionarray) 138 | regionnames = regiondefinitionarray[:,1] 139 | regiondefinitions = [regdef isa Tuple ? regdef : (regdef,) for regdef in regiondefinitionarray[:,2]] 140 | nutsdef = [Tuple(rd for rd in regdef if rd isa NUTS) for regdef in regiondefinitions] 141 | gadmdef = [Tuple(rd for rd in regdef if rd isa GADM) for regdef in regiondefinitions] 142 | return regionnames, nutsdef, gadmdef 143 | end 144 | 145 | function makeregions_gadm!(region, gadm, subregionnames, regiondefinitions) 146 | println("Making region index matrix...") 147 | regionlookup = build_inverseregionlookup(regiondefinitions) 148 | rows, cols = size(region) 149 | updateprogress = Progress(cols, 1) 150 | for c in randperm(cols) 151 | for r = 1:rows 152 | gadm_uid = gadm[r,c] 153 | (gadm_uid == 0 || gadm_uid == 78413 || region[r,c] > 0) && continue # ignore Caspian Sea (weirdly classified as a region in GADM) 154 | reg0, reg1, reg2 = subregionnames[gadm_uid,:] 155 | regid = lookup_regionnames(regionlookup, reg0, reg1, reg2) 156 | if regid > 0 157 | region[r,c] = regid 158 | end 159 | end 160 | next!(updateprogress) 161 | end 162 | end 163 | 164 | function makeregions_nuts!(region, nuts, subregionnames, regiondefinitions) 165 | println("Making region index matrix...") 166 | regionlookup = Dict(r => i for (i,tuptup) in enumerate(regiondefinitions) 167 | for ntup in tuptup for r in ntup.subregionnames) 168 | rows, cols = size(region) 169 | updateprogress = Progress(cols, 1) 170 | for c in randperm(cols) 171 | for r = 1:rows 172 | nuts_id = nuts[r,c] 173 | (nuts_id == 0 || region[r,c] > 0) && continue 174 | reg = subregionnames[nuts_id] 175 | while length(reg) >= 2 176 | regid = get(regionlookup, reg, 0) 177 | if regid > 0 178 | region[r,c] = regid 179 | break 180 | end 181 | reg = reg[1:end-1] 182 | end 183 | end 184 | next!(updateprogress) 185 | end 186 | end 187 | 188 | function lookup_regionnames(regionlookup, reg0, reg1, reg2) 189 | v = get(regionlookup, (reg0, "*", "*"), 0) 190 | v > 0 && return v 191 | v = get(regionlookup, (reg0, reg1, "*"), 0) 192 | v > 0 && return v 193 | return get(regionlookup, (reg0, reg1, reg2), 0) 194 | end 195 | 196 | function build_inverseregionlookup(regiondefinitions) 197 | d = Dict{Tuple{String,String,String}, Int}() 198 | for reg = 1:length(regiondefinitions) 199 | for regdef in regiondefinitions[reg] 200 | parentregions, subregionnames = regdef.parentregions, regdef.subregionnames 201 | regions = ["*", "*", "*"] 202 | regions[1:length(parentregions)] = parentregions 203 | for s in subregionnames 204 | regions[length(parentregions)+1] = s 205 | d[regions...] = reg 206 | end 207 | end 208 | end 209 | return d 210 | end 211 | 212 | function getsubregions(regtype::Type{GADM}, regionnames) 213 | gadm = readdlm(in_datafolder("gadmfields.csv"), ',', skipstart=1)[:, 2:4] 214 | for reg in regionnames 215 | gadm = gadm[gadm[:,1].==reg, 2:end] 216 | end 217 | return sort(unique(gadm[:,1])) 218 | end 219 | 220 | function getsubregions(regtype::Type{NUTS}, regionname) 221 | length(regionname) > 1 && error("Give only one string argument to match beginning of NUTS region names.") 222 | nuts = readdlm(in_datafolder("nutsfields.csv"), ',', skipstart=1)[:, 3] 223 | if isempty(regionname) 224 | return sort(unique(first.(nuts, 2))) 225 | else 226 | return sort(nuts[startswith.(nuts, regionname)]) 227 | end 228 | end 229 | 230 | function subregions(regtype::Type{T} where T <: RegionType, regionnames::String...) 231 | reglist = join(regionnames, ", ") 232 | selected = string.(getsubregions(regtype, regionnames)) 233 | selectedlist = join(selected, ", ") 234 | # isempty(regionnames) && println("Showing top level $regtype regions:") 235 | # println("$regtype($reglist): $selectedlist") 236 | return selected 237 | end 238 | 239 | function all_gadm_subregions(country::AbstractString, level::Int) 240 | level == 1 && return [country GADM(country)] 241 | return all_gadm_subregions(country, subregions(GADM, country), level) 242 | end 243 | 244 | function all_gadm_subregions(country::AbstractString, reg2::AbstractString, level::Int) 245 | reg2 == "" && return [country GADM(country)] 246 | level == 2 && return [reg2 GADM([country], reg2)] 247 | return vcat(all_gadm_subregions.(country, reg2, subregions(GADM, country, reg2))...) 248 | end 249 | 250 | function all_gadm_subregions(country::AbstractString, reg2::AbstractString, reg3::AbstractString) 251 | reg3 == "" && return [reg2 GADM([country], reg2)] 252 | return [reg3 GADM([country, reg2], reg3)] 253 | end 254 | 255 | all_gadm_subregions(countries::AbstractArray, level::Int) = 256 | vcat(all_gadm_subregions.(countries, level)...) 257 | 258 | all_gadm_subregions(country::AbstractString, regions2::AbstractArray, level::Int) = 259 | vcat(all_gadm_subregions.(country, regions2, level)...) 260 | -------------------------------------------------------------------------------- /src/map_test_plots.jl: -------------------------------------------------------------------------------- 1 | function vtest() 2 | fig = Figure() 3 | 4 | ## Voronoi tessellation: Make tessellations from their dual triangulation 5 | pts = 25randn(2, 500) 6 | tri = triangulate(pts) 7 | vorn = voronoi(tri) 8 | 9 | ax = GeoMakie.Axis(fig[2, 2], title="(f): Voronoi tessellation", titlealign=:left, width=400, height=400) 10 | voronoiplot!(ax, vorn, show_generators=false) 11 | 12 | ## Clipped Voronoi tessellation 13 | vorn = voronoi(tri, true) 14 | ax = GeoMakie.Axis(fig[2, 3], title="(g): Clipped Voronoi tessellation", titlealign=:left, width=400, height=400) 15 | voronoiplot!(ax, vorn, show_generators=false, color=:white) 16 | 17 | ## Centroidal Voronoi tessellation (CVT) 18 | points = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)] 19 | tri = triangulate(points; boundary_nodes=[1, 2, 3, 4, 1]) 20 | refine!(tri; max_area=1e-3, min_angle=29.871) 21 | vorn = voronoi(tri) 22 | smooth_vorn = centroidal_smooth(vorn; maxiters=2500) 23 | ax = GeoMakie.Axis(fig[2, 4], title="(h): Centroidal Voronoi tessellation", titlealign=:left, width=400, height=400) 24 | voronoiplot!(ax, smooth_vorn, show_generators=true, markersize=4, colormap=:jet) 25 | 26 | resize_to_layout!(fig) 27 | fig 28 | end 29 | 30 | function vtest2() 31 | pts = 25 * randn(2, 500) 32 | tri = triangulate(pts) 33 | vorn = voronoi(tri) 34 | clip_vorn = voronoi(tri, true) 35 | smooth_vorn = centroidal_smooth(clip_vorn) 36 | 37 | cmap = Makie.cgrad(:jet) 38 | colors = get_polygon_colors(vorn, cmap) 39 | fig = Figure(fontsize=38, resolution=(1540, 485)) 40 | for (j, vor) in enumerate((vorn, clip_vorn, smooth_vorn)) 41 | ax = Makie.Axis(fig[1, j], width=400, height=400) 42 | voronoiplot!(ax, vor, strokecolor=:red, strokewidth=0.2, polygon_color=colors, markersize=4) 43 | xlims!(ax, -100, 100) 44 | ylims!(ax, -100, 100) 45 | end 46 | return fig 47 | end 48 | 49 | # Helper method deleted from DelaunayTriangulation after v0.6 50 | function get_polygon_colors(vorn::VoronoiTessellation, cmap) 51 | F = DelaunayTriangulation.number_type(vorn) 52 | gtr = [get_generator(vorn, i) for i in each_generator(vorn)] 53 | gtr_mat = reinterpret(reshape, F, gtr) 54 | colors = get(cmap, gtr_mat, :extrema) 55 | return [(a + b) / 2 for (a, b) in eachcol(colors)] 56 | end 57 | 58 | function mtest() 59 | f = Figure() 60 | ax = GeoMakie.Axis(f[1,1]) 61 | hm = heatmap!(ax, 1:4, 1:4, repeat([2 2 3 1], outer=4), colormap=cgrad(:viridis, 3, categorical=true), shading=NoShading) 62 | Colorbar(f[:, 2], hm) 63 | f 64 | end 65 | 66 | function mtest2() 67 | f = Figure() 68 | ax = GeoAxis(f[1,1]) 69 | hm = surface!(ax, 11:14, 51:54, repeat([2 2 3 1], outer=4), colormap=cgrad(:viridis, 3, categorical=true), shading=NoShading) 70 | Colorbar(f[:, 2], hm) 71 | f 72 | end 73 | 74 | function geotest_old() 75 | source = "+proj=longlat +datum=WGS84" 76 | dest = "+proj=natearth2" 77 | ptrans = Makie.PointTrans{2}(Proj.Transformation(source, dest, always_xy=true)) 78 | 79 | fig = Figure() 80 | ax = GeoMakie.Axis(fig[1,1], aspect = DataAspect()) 81 | 82 | # all input data coordinates are projected using this function 83 | ax.scene.transformation.transform_func[] = ptrans 84 | 85 | # draw projected grid lines and set limits accordingly 86 | lats = -90:10.0:90 87 | lons = -180:10.0:180 88 | lons = collect(lons) 89 | lons[end] = prevfloat(lons[end]) # avoid PROJ wrapping 180 to -180 90 | sz = length(lons), length(lats) 91 | points = map(CartesianIndices(sz)) do xy 92 | x, y = Tuple(xy) 93 | Point2f(lons[x], lats[y]) 94 | end 95 | limits = Rect2f(Makie.apply_transform(ptrans, points)) 96 | limits!(ax, limits) 97 | wireframe!(ax, lons, lats, zeros(sz), color=(:gray, 0.2), transparency=true) 98 | 99 | # add black polygons for land area 100 | url = "https://raw.githubusercontent.com/nvkelso/natural-earth-vector/master/geojson/" 101 | land = Downloads.download(url * "ne_110m_land.geojson", IOBuffer()) 102 | land_geo = GeoJSON.read(seekstart(land)) 103 | n = length(GeoInterface.features(land_geo)) 104 | poly!(ax, land_geo, color=1:n) 105 | 106 | # add grey dots for populated places 107 | pop = Downloads.download(url * "ne_10m_populated_places_simple.geojson", IOBuffer()) 108 | pop_geo = GeoJSON.read(seekstart(pop)) 109 | # scatter!(ax, GeoMakie.geo2basic(pop_geo), color="lightgrey", markersize=1.2) 110 | 111 | hidedecorations!(ax) # x & y axis numbers 112 | hidespines!(ax) # outer rectangle 113 | 114 | # return fig 115 | Makie.save("geomakie_figtest.png", ax.scene, resolution=(1000,600)) 116 | screen = GLMakie.singleton_screen(false) 117 | close(screen) 118 | GLMakie.destroy!(screen) 119 | end 120 | 121 | function geotest_new() 122 | # GLMakie.activate!(px_per_unit = 4) # hide 123 | 124 | source = "+proj=longlat +datum=WGS84" 125 | dest = "+proj=natearth2" 126 | 127 | land = GeoMakie.assetpath("ne_110m_land.geojson") 128 | land_geo = GeoJSON.read(read(land, String)) 129 | pop = GeoMakie.assetpath("ne_10m_populated_places_simple.geojson") 130 | pop_geo = GeoJSON.read(read(pop, String)) 131 | 132 | begin 133 | fig = Figure(size = (1000,500)) 134 | ga = GeoAxis( 135 | fig[1, 1]; 136 | source = source, 137 | dest = dest 138 | ) 139 | 140 | ga.xticklabelsvisible[] = false 141 | ga.yticklabelsvisible[] = false 142 | poly!(ga, land_geo, color=:black) 143 | popisqrt = map(pop_geo) do geo 144 | popi = geo.pop_max 145 | popi > 0 ? sqrt(popi) : 0.0 146 | end 147 | mini, maxi = extrema(popisqrt) 148 | msize = map(popisqrt) do popi 149 | normed = (popi .- mini) ./ (maxi - mini) 150 | return (normed * 20) .+ 1 151 | end 152 | scatter!(ga, pop_geo; color=popisqrt, markersize=msize) 153 | fig 154 | end 155 | end 156 | 157 | # Hack for missing method in GeoMakie/MakieCore, https://github.com/MakieOrg/GeoMakie.jl/issues/201 158 | Makie.point_iterator(::Makie.MakieCore.Text{Tuple{Vector{Makie.GlyphCollection}}}) = Point2f[] 159 | 160 | function geotest2() 161 | # GLMakie.activate!(px_per_unit = 4) # hide 162 | img = rotr90(GeoMakie.earth()) 163 | fig = Figure(size = (1200, 600)) 164 | ga2 = GeoAxis(fig[1, 1]; dest = "+proj=moll", title = "Image of Earth") 165 | surface!(ga2, (-180f0)..(180f0), -90f0..90f0, zeros(size(img)); shading = NoShading, color = img) 166 | text!(ga2, 30, 45, text = "center", align = (:center, :center)) 167 | save("geotest2.png", fig; px_per_unit=2) # needs point_iterator() above 168 | fig 169 | end 170 | 171 | function geotest3() # README example, not quite working 172 | # GLMakie.activate!(px_per_unit = 4) # hide 173 | 174 | fieldlons = -180:180; fieldlats = -90:90 175 | field = [exp(cosd(lon)) + 3(lat/90) for lon in fieldlons, lat in fieldlats] 176 | 177 | img = rotr90(GeoMakie.earth()) 178 | land = GeoMakie.land() 179 | 180 | fig = Figure(size = (1000, 1000)) 181 | 182 | ga1 = GeoAxis(fig[1, 1]; dest = "+proj=ortho", title = "Orthographic\n ") 183 | ga2 = GeoAxis(fig[1, 2]; dest = "+proj=moll", title = "Image of Earth\n ") 184 | ga3 = GeoAxis(fig[2, 1]; title = "Plotting polygons") 185 | ga4 = GeoAxis(fig[2, 2]; dest = "+proj=natearth", title = "Auto limits") # you can plot geodata on regular axes too 186 | 187 | surface!(ga1, fieldlons, fieldlats, field; colormap = :rainbow_bgyrm_35_85_c69_n256, shading = NoShading) 188 | lines!(ga1, GeoMakie.coastlines(); transformation = (; translation = (0, 0, 1))) # maybe transformation only need in GLMakie, not CairoMakie 189 | image!(ga2, -180..180, -90..90, img; interpolate = false) 190 | poly!(ga3, land[50:100]; color = 1:51, colormap = (:plasma, 0.5)) 191 | poly!(ga4, land[22]); #datalims!(ga4) 192 | 193 | fig 194 | end 195 | 196 | function geotest4() 197 | # GLMakie.activate!(px_per_unit = 4) # hide 198 | 199 | lons = -180:180 200 | lats = -90:90 201 | field = [exp(cosd(l)) + 3(y/90) for l in lons, y in lats] 202 | 203 | fig = Figure() 204 | ga = GeoAxis( 205 | fig[1, 1], 206 | dest="+proj=ortho", 207 | title = "Orthographic projection", 208 | xticklabelcolor=:red, xgridcolor=:red, 209 | ) 210 | lp = lines!(ga, GeoMakie.coastlines(); transformation = (; translation = (0, 0, 1))) 211 | sp = surface!(ga, lons, lats, zeros(size(field)); color=field, shading = NoShading, colormap=:rainbow_bgyrm_35_85_c69_n256) 212 | cb = Colorbar(fig[1, 2], sp) 213 | fig 214 | end 215 | 216 | function geotest5() 217 | # GLMakie.activate!(px_per_unit = 4) # hide 218 | 219 | lons = -180:180 220 | lats = -90:90 221 | field = [exp(cosd(l)) + 3(y / 90) for l in lons, y in lats] 222 | 223 | fig = Figure() 224 | ax1 = GeoAxis(fig[1, 1], dest = "+proj=vitk1 +lat_1=45 +lat_2=55",title = "vitk1", xgridcolor=:red) 225 | ax2 = GeoAxis(fig[1, 2]; dest="+proj=wintri", title = "wintri") 226 | 227 | surface!(ax1, lons, lats, field; shading = NoShading, colormap = (:plasma, 0.45)) 228 | surface!(ax2, lons, lats, field; shading = NoShading, colormap = (:plasma, 0.45)) 229 | 230 | fig 231 | end 232 | 233 | function geotest6() 234 | # GLMakie.activate!(px_per_unit = 4) # hide 235 | 236 | lons = -180:180 237 | lats = -90:90 238 | # Create some field of values across `lons` and `lats`. 239 | # 240 | # This grid can be of any density, but note that the 241 | # time it takes to plot scales with the grid size! 242 | field = [exp(cosd(l)) + 3(y/90) for l in lons, y in lats] 243 | 244 | fig = Figure() 245 | ax = GeoAxis(fig[1,1]) 246 | contourf!(ax, lons, lats, field) 247 | fig 248 | end 249 | 250 | function geotest7() 251 | # GLMakie.activate!(px_per_unit = 4) # hide 252 | 253 | projections = ["+proj=adams_hemi", "+proj=adams_ws1", "+proj=adams_ws2", 254 | "+proj=aea +lat_1=29.5 +lat_2=42.5", "+proj=aeqd", "+proj=airy", "+proj=aitoff", 255 | "+proj=apian", "+proj=august", "+proj=bacon", "+proj=bertin1953", "+proj=bipc +ns", 256 | "+proj=boggs", "+proj=bonne +lat_1=10", "+proj=cass", "+proj=cea", 257 | "+proj=chamb +lat_1=10 +lon_1=30 +lon_2=40", "+proj=collg", "+proj=comill", 258 | "+proj=crast", "+proj=denoy", "+proj=eck1", "+proj=eck2", "+proj=eck3", 259 | "+proj=eck4", "+proj=eck5", "+proj=eck6", "+proj=eqc", "+proj=eqdc +lat_1=55 +lat_2=60", 260 | "+proj=eqearth", "+proj=euler +lat_1=67 +lat_2=75", "+proj=fahey", "+proj=fouc", "+proj=fouc_s", 261 | "+proj=gall", "+proj=geos +h=35785831.0 +lon_0=-60 +sweep=y", "+proj=gins8", "+proj=gn_sinu +m=2 +n=3", 262 | "+proj=goode", "+proj=guyou", "+proj=hammer", "+proj=hatano", 263 | "+proj=igh", "+proj=igh_o +lon_0=-160", "+proj=imw_p +lat_1=30 +lat_2=-40", "+proj=isea", 264 | "+proj=kav5", "+proj=kav7", "+proj=laea", "+proj=lagrng", "+proj=larr", "+proj=lask", 265 | "+proj=lcca +lat_0=35", "+proj=leac", "+proj=loxim", 266 | "+proj=lsat +ellps=GRS80 +lat_1=-60 +lat_2=60 +lsat=2 +path=2", "+proj=mbt_s", "+proj=mbt_fps", 267 | "+proj=mbtfpp", "+proj=mbtfpq", "+proj=mbtfps", "+proj=merc", "+proj=mill", "+proj=misrsom +path=1", 268 | "+proj=moll", "+proj=murd1 +lat_1=30 +lat_2=50", 269 | "+proj=murd3 +lat_1=30 +lat_2=50", "+proj=natearth", "+proj=natearth2", 270 | "+proj=nell", "+proj=nell_h", "+proj=nicol", 271 | "+proj=ob_tran +o_proj=mill +o_lon_p=40 +o_lat_p=50 +lon_0=60", "+proj=ocea", "+proj=oea +m=1 +n=2", 272 | "+proj=omerc +lat_1=45 +lat_2=55", "+proj=ortel", "+proj=ortho", "+proj=patterson", "+proj=poly", 273 | "+proj=putp1", "+proj=putp2", "+proj=putp3", "+proj=putp3p", "+proj=putp4p", "+proj=putp5", 274 | "+proj=putp5p", "+proj=putp6", "+proj=putp6p", "+proj=qua_aut", "+proj=robin", "+proj=rouss", 275 | "+proj=rpoly", "+proj=sinu", "+proj=times", "+proj=tissot +lat_1=60 +lat_2=65", "+proj=tmerc", 276 | "+proj=tobmerc", "+proj=tpeqd +lat_1=60 +lat_2=65", "+proj=urm5 +n=0.9 +alpha=2 +q=4", 277 | "+proj=urmfps +n=0.5", "+proj=vandg", "+proj=vandg2", "+proj=vandg3", "+proj=vandg4", 278 | "+proj=vitk1 +lat_1=45 +lat_2=55", "+proj=wag1", "+proj=wag2", "+proj=wag3", "+proj=wag4", 279 | "+proj=wag5", "+proj=wag6", "+proj=wag7", "+proj=webmerc +datum=WGS84", "+proj=weren", 280 | "+proj=wink1", "+proj=wink2", "+proj=wintri"] 281 | let k = 1 282 | fig = Figure(size=(1500, 1500)) 283 | @time for i in 1:10, j in 1:3 284 | try 285 | ga = GeoAxis( 286 | fig[i, j]; 287 | dest=projections[k], 288 | title="$(projections[k])", 289 | ) 290 | lines!(ga, GeoMakie.coastlines()) 291 | catch error 292 | println("Error at iteration $k") 293 | break 294 | end 295 | 296 | k += 1 297 | end 298 | fig 299 | end 300 | end 301 | 302 | function geotest8() 303 | # https://datahub.io/core/geo-countries#curl # download data from here 304 | path = GeoMakie.assetpath("vector", "countries.geo.json") 305 | json_str = read(path, String) 306 | worldCountries = GeoJSON.read(json_str) 307 | n = length(worldCountries) 308 | lons = -180:180 309 | lats = -90:90 310 | field = [exp(cosd(l)) + 3(y/90) for l in lons, y in lats] 311 | 312 | fig = Figure(size = (1200,800), fontsize = 22) 313 | 314 | ax = GeoAxis( 315 | fig[1,1]; 316 | dest="+proj=wintri", 317 | title = "World Countries", 318 | tellheight = true, 319 | ) 320 | 321 | hm1 = surface!(ax, lons, lats, field; shading = NoShading) 322 | translate!(hm1, 0, 0, -10) 323 | 324 | hm2 = poly!( 325 | ax, worldCountries; # vector data (probably)! 326 | color= 1:n, 327 | colormap = Reverse(:plasma), 328 | strokecolor = :black, 329 | strokewidth = 0.25 330 | ) 331 | 332 | cb = Colorbar(fig[1,2]; colorrange = (1, n), colormap = Reverse(:plasma), label = "variable, color code", height = Relative(0.65)) 333 | 334 | fig 335 | end 336 | -------------------------------------------------------------------------------- /src/makesolarera5.jl: -------------------------------------------------------------------------------- 1 | export makesolarera5, makemonthlysolarera5, clearvars_era5 2 | 3 | # TO DO: save FDIR to disk (masked by land cover) for parabolic troughs (oriented north-south). 4 | 5 | # Can optionally zero water cells in the landcover dataset to save a lot of disk space. 6 | # add options to save all three CSP variables: tower, trough-NS, trough-EW. 7 | function makesolarera5(; year=2018, land_cells_only=true) 8 | hours = 24*Dates.daysinyear(year) 9 | gridsize = (1280,640) 10 | 11 | datafolder = getconfig("datafolder") 12 | downloadsfolder = joinpath(datafolder, "downloads") 13 | 14 | filename = joinpath(datafolder, "era5solar$year.h5") 15 | isfile(filename) && error("File $filename exists in $datafolder, please delete or rename manually.") 16 | 17 | land = imresize(JLD.load(joinpath(datafolder, "landcover.jld"), "landcover"), gridsize) 18 | 19 | println("Creating HDF5 file: $filename") 20 | h5open(filename, "w") do file 21 | group = file["/"] 22 | # create GTI and DNI variables (Global Tilted Irradiance and Direct Normal Irradiance) 23 | dataset_GTI = create_dataset(group, "GTI", datatype(Float32), dataspace(hours,gridsize...), chunk=(hours,16,16), blosc=3) 24 | dataset_DNI = create_dataset(group, "DNI", datatype(Float32), dataspace(hours,gridsize...), chunk=(hours,16,16), blosc=3) 25 | dataset_meanGTI = create_dataset(group, "meanGTI", datatype(Float32), dataspace(gridsize...), chunk=gridsize, blosc=3) 26 | dataset_meanDNI = create_dataset(group, "meanDNI", datatype(Float32), dataspace(gridsize...), chunk=gridsize, blosc=3) 27 | 28 | totalGTI = zeros(gridsize) 29 | totalDNI = zeros(gridsize) 30 | hour = 1 31 | 32 | count = 0 33 | for month = 1:12, monthhalf = 1:2 34 | if monthhalf == 1 35 | firstday, lastday = "01", "15" 36 | else 37 | firstday = "16" 38 | lastday = Dates.daysinmonth(Date("$year-$month")) 39 | end 40 | monthstr = lpad(month,2,'0') 41 | date = "$year-$monthstr-$firstday/$year-$monthstr-$lastday" 42 | erafile = joinpath(downloadsfolder, "solar$year-$monthstr$firstday-$monthstr$lastday.nc") 43 | 44 | count += 1 45 | println("\nFile $count of 24:") 46 | println("Reading solar diffuse and direct components from $erafile...") 47 | ncdataset = Dataset(erafile) 48 | # GHI = replace(ncdataset["ssrd"][:,:,:], missing => 0.0) .* (land .> 0) ./ (3600*1000) 49 | # DHI = GHI - replace(ncdataset["fdir"][:,:,:], missing => 0.0) .* (land .> 0) ./ (3600*1000) 50 | ssrd = nomissing(ncdataset["ssrd"][:,:,:], 0.0) 51 | fdir = nomissing(ncdataset["fdir"][:,:,:], 0.0) 52 | datetime = nomissing(ncdataset["time"][:], DateTime(0)) 53 | GTI, DNI = @time transform_solar_vars(ssrd, fdir, datetime, land, land_cells_only) 54 | 55 | totalGTI += sumdrop(GTI, dims=1) 56 | totalDNI += sumdrop(DNI, dims=1) 57 | len = size(GTI,1) 58 | println("Writing to $filename...") 59 | dataset_GTI[hour:hour+len-1,:,:] = GTI 60 | dataset_DNI[hour:hour+len-1,:,:] = DNI 61 | hour += len 62 | end 63 | println("\nWriting annual mean solar variables to $filename...") 64 | dataset_meanGTI[:,:] = totalGTI/hours 65 | dataset_meanDNI[:,:] = totalDNI/hours 66 | end 67 | nothing 68 | end 69 | 70 | function makemonthlysolarera5(; land_cells_only=true) 71 | years = 1979:2019 72 | nyears = length(years) 73 | nmonths = nyears*12 74 | gridsize = (1280,640) 75 | 76 | datafolder = getconfig("datafolder") 77 | downloadsfolder = joinpath(datafolder, "downloads") 78 | 79 | filename = joinpath(datafolder, "era5monthlysolar.h5") 80 | isfile(filename) && error("File $filename exists in $datafolder, please delete or rename manually.") 81 | 82 | land = imresize(JLD.load(joinpath(datafolder, "landcover.jld"), "landcover"), gridsize) 83 | 84 | println("Creating HDF5 file: $filename") 85 | h5open(filename, "w") do file 86 | group = file["/"] 87 | # create GTI and DNI variables (Global Tilted Irradiance and Direct Normal Irradiance) 88 | dataset_ssrd = create_dataset(group, "monthlyssrd", datatype(Float32), dataspace(nmonths,gridsize...), chunk=(nmonths,16,16), blosc=3) 89 | dataset_fdir = create_dataset(group, "monthlyfdir", datatype(Float32), dataspace(nmonths,gridsize...), chunk=(nmonths,16,16), blosc=3) 90 | dataset_annualssrd = create_dataset(group, "annualssrd", datatype(Float32), dataspace(nyears,gridsize...), chunk=(nyears,16,16), blosc=3) 91 | dataset_annualfdir = create_dataset(group, "annualfdir", datatype(Float32), dataspace(nyears,gridsize...), chunk=(nyears,16,16), blosc=3) 92 | 93 | erafile = in_datafolder("downloads", "monthlysolar_$(years[1])-$(years[end]).nc") 94 | 95 | println("Reading solar diffuse and direct components from $erafile...") 96 | # Permute dimensions to get hours as dimension 1 (for efficient iteration in GISwind()) 97 | ncdataset = Dataset(erafile) 98 | ssrd = permutedims(nomissing(ncdataset["ssrd"][:,:,:], 0.0) .* (land .> 0), [3,1,2]) 99 | fdir = permutedims(nomissing(ncdataset["fdir"][:,:,:], 0.0) .* (land .> 0), [3,1,2]) 100 | 101 | println("Writing to $filename...") 102 | # For these monthly average insolations we skip the sun position calculations 103 | # made for the hourly dataset and just assign SSRD & FDIR directly. 104 | dataset_ssrd[:,:,:] = ssrd 105 | dataset_fdir[:,:,:] = fdir 106 | for y = 1:nyears 107 | dataset_annualssrd[y,:,:] = sum(ssrd[12*(y-1) .+ (1:12),:,:], dims=1) ./ 12 108 | dataset_annualfdir[y,:,:] = sum(fdir[12*(y-1) .+ (1:12),:,:], dims=1) ./ 12 109 | end 110 | end 111 | nothing 112 | end 113 | 114 | function transform_solar_vars(ssrd, fdir, datetime, land, land_cells_only) 115 | println("Calculating GTI and DNI using solar positions...") 116 | 117 | erares = 0.28125 118 | eralons = -180+erares/2:erares:180-erares/2 # longitude values (pixel center) 119 | eralats = 90-erares/2:-erares:-90+erares/2 # latitude values (pixel center) 120 | 121 | # Permute dimensions to get hours as dimension 1 (for efficient iteration in GISsolar()) 122 | nlons, nlats, nhours = size(ssrd) 123 | GTI = zeros(Float32, (nhours, nlons, nlats)) 124 | DNI = zeros(Float32, (nhours, nlons, nlats)) 125 | 126 | albedo = 0.2 # standard average value for ground albedo (maybe use landcover data later) 127 | azimuthPV = π * (eralats .< 0) # assume equator-facing PV panels (azimuth = 0 if lat>0, π if lat<0) 128 | tiltPV = optimalPVtilt.(eralats) # degrees 129 | # println("Check if PV tilt sign is correct in the southern hemisphere!") 130 | cos_tiltPVs = cosd.(tiltPV) 131 | sin_tiltPVs = sind.(tiltPV) 132 | 133 | almostzero = eps(Float32) 134 | 135 | for h = 1:nhours 136 | # Note that while the solar position calculations are instantaneous positions, ERA5 radiation variables 137 | # represent accumulated radiation over the hour *ending* at the indicated time. Therefore, solar positions 138 | # must be shifted 30 minutes BACK to correspond to the midpoint time of the ERA5 accumulations. 139 | # Source: ERA5 "accumulations are over the hour ending at the forecast step" 140 | # https://confluence.ecmwf.int//display/CKB/ERA5+data+documentation#ERA5datadocumentation-Meanratesandaccumulations 141 | dt = datetime[h] - Minute(30) 142 | TSI = solarinsolation(dt)/1000 # Total Solar Irradiance (top of atmosphere, perpendicular to sun) 143 | for lon = 1:nlons 144 | eralon = eralons[lon] 145 | δ, H = solarposition(dt, eralon) # absolute solar position (declination, hour angle) 146 | solarpos = sines_and_cosines(δ,H) 147 | for lat = 1:nlats 148 | eralat = eralats[lat] 149 | land_cells_only && land[lon,lat] == 0 && continue 150 | 151 | zenith, azimuth = zenith_azimuth(eralat, solarpos...) # relative solar position 152 | cos_zen = max(almostzero, cos(zenith)) 153 | 154 | cos_tiltPV = cos_tiltPVs[lat] 155 | sin_tiltPV = sin_tiltPVs[lat] 156 | 157 | # ERA5 radiations are in J/m2/period, so for hourly data divide by 3600*1000 to get kW/m2 158 | GHI = ssrd[lon,lat,h]/(3600*1000) # Global Horizontal Irradiance 159 | FDIR = fdir[lon,lat,h]/(3600*1000) 160 | DHI = GHI - FDIR # Diffuse Horizontal Irradiance 161 | 162 | # When the solar elevation is close to 0, both FDIR and cos(zenith) will also be near 0, and 163 | # calculated DNI will approach "0/0". So we'll clamp DNI to avoid artifacts. 164 | # That wasn't enough, so we'll add an artificial term to increase the denominator near the horizon. 165 | dni = clamp(FDIR / (cos_zen + horizoncorrection(zenith)), 0, TSI) # Direct Normal Irradiance 166 | 167 | # Cosine of angle of incidence 168 | cos_AOI = max(0, cos_zen*cos_tiltPV + sin(zenith)*sin_tiltPV*cos(azimuth-azimuthPV[lat])) 169 | Rb = cos_AOI/max(0.017, cos_zen) # cos_zen clamped to cos(89) = 0.017 170 | 171 | # AI is the "anisotropy index", i.e. beam radiation transmittance through atmosphere. 172 | # We need it to consider circumsolar diffuse radiation (the bright area of the sky near the sun). 173 | AI = dni / TSI 174 | 175 | beamradiation = dni*cos_AOI # direct beam radiation from the sun 176 | groundradiation = GHI*albedo*(1-cos_tiltPV)/2 # diffuse reflected radiation from the ground 177 | skyradiation = DHI*AI*Rb + DHI*(1-AI)*(1+cos_tiltPV)/2 # diffuse radiation from the sky 178 | 179 | GTI[h,lon,lat] = beamradiation + groundradiation + skyradiation 180 | DNI[h,lon,lat] = dni 181 | end 182 | end 183 | end 184 | return GTI, DNI 185 | end 186 | 187 | # Optimal tilt angle (degrees) of PV modules as a function of latitude 188 | # Jacobson 2018, https://web.stanford.edu/group/efmh/jacobson/Articles/I/TiltAngles.pdf 189 | # (Motivation: when the sky is overcast, a shallower tilt captures more diffuse radiation. 190 | # Higher latitutes tend to have cloudier weather. Also, you can play with the tilt to 191 | # trade-off between summer and winter insolation.) 192 | function optimalPVtilt(ϕ) 193 | if ϕ > 0 194 | clamp(1.3793 + ϕ*(1.2011 + ϕ*(-0.014404 + ϕ*0.000080509)), 0, 90) 195 | else 196 | clamp(-0.41657 + ϕ*(1.4216 + ϕ*(0.024051 + ϕ*0.00021828)), -90, 0) 197 | end 198 | end 199 | 200 | # correction term to reduce horizon artifacts 201 | horizoncorrection(zen) = 1/50 * max(0, rad2deg(zen)-85).^2 202 | 203 | function clearvars_era5(; year=2018, datasets=["wind", "solar", "temp"]) 204 | for dataset in datasets, month = 1:12, monthhalf = 1:2 205 | if monthhalf == 1 206 | firstday, lastday = "01", "15" 207 | else 208 | firstday = "16" 209 | lastday = Dates.daysinmonth(Date("$year-$month")) 210 | end 211 | monthstr = lpad(month,2,'0') 212 | date = "$year-$monthstr-$firstday/$year-$monthstr-$lastday" 213 | erafile = in_datafolder("downloads", "$dataset$year-$monthstr$firstday-$monthstr$lastday.nc") 214 | if isfile(erafile) 215 | println("Deleting $erafile...") 216 | rm(erafile) 217 | end 218 | end 219 | end 220 | 221 | 222 | # SOLAR INSOLATION CALCULATIONS FOR PV 223 | # 224 | # ERA5 radiations are those accumulated during one hour onto a square meter of a flat horizontal plane. 225 | # This holds for both SSRD and FDIR (see Hogan, "Radiation Quantities in the ECMWF model and MARS"). 226 | # ERA5 vars: SSRD = Surface Solar Radiation Downwards, FDIR = DIRect solar radiation at the surface. 227 | # 228 | # We will use the following abbreviations and identifiers: 229 | # 230 | # GHI = Global Horizontal Irradiance (here "global" means direct + diffuse) 231 | # DHI = Diffuse Horizontal Irradiance 232 | # DNI = Direct Normal Irradiance 233 | # 234 | # AOI = angle of incidence of sun on panel 235 | # β = PV panel tilt angle 236 | # z = solar zenith angle = 90 - solar elevation angle 237 | # ρ = ground albedo ≈ 0.2 (we can try estimating albedo from land cover data later) 238 | # Rb = cos_AOI / cosz (= I{bT}/I{b}, ratio of beam irradiance on a tilted surface and a horizontal surface) 239 | # AZsolar is the azimuth (direction of the sun) measured from south with positive direction towards west. 240 | # AZpv 241 | # 242 | # We have: 243 | # FDIR = DNI * cosz 244 | # GHI = SSRD = FDIR + DHI = DNI*cosz + DHI 245 | # cos_AOI = cosz*cosβ + sinz*sinβ*cos(AZsolar-AZpv) 246 | # 247 | # For solar PV, we need to estimate Global Tilted Irradiance (GTI) from GHI. 248 | # There are three components to the radiation that hits a tilted PV panel: 249 | # 1. direct beam radiation from the sun 250 | # 2. diffuse radiation from the sky 251 | # 3. diffuse reflected radiation from the ground 252 | # 253 | # Components 1 and 3 are easy (see any textbook or paper on solar radiation, e.g. Loutzenhiser 2007): 254 | # 1. FDIR * Rb = FDIR * cos_AOI / cosz = DNI * cos_AOI 255 | # 3. GHI * ρ * (1 - cosβ)/2 256 | # 257 | # For an evaluation of approaches on estimation of diffuse sky radiation, see Loutzenhiser 2007. 258 | # For now we will use the simplest approach and assume the sky is isotropic (i.e. diffuse radiation 259 | # is uniformly distributed over the sky dome). Then we have: 260 | # 261 | # 2. DHI * (1 + cosβ)/2 262 | # 263 | # But the sky around the sun is significantly brighter ("circumsolar diffuse radiation"). Later, 264 | # if we also download the TISR variable from ERA5 (top of atmosphere incident solar radiation), 265 | # we can use the Hay and Davies model from 1980 to consider circumsolar diffuse radiation: 266 | # (Hay-Davies validation with measurements: Mubarak et al 2017) 267 | # 268 | # 2. DHI*AI*Rb + DHI*(1-AI)*(1+cosβ)/2, with AI = FDIR / TISR = DNI / solarconstant 269 | # 270 | # AI is the "anisotropy index", i.e. beam radiation transmittance through atmosphere. 271 | # (TISR, like FDIR, is also defined as radiation hitting a horizontal plane) 272 | # 273 | # See also textbook by Petros Axaopoulos (chapter 4) 274 | # http://www.labri.fr/perso/billaud/Helios2/docs/page-all-tiles.php 275 | # 276 | # 277 | # SOLAR INSOLATION CALCULATIONS FOR CSP 278 | # 279 | # Ihe insolation per m2 on a 2-axis solar tower collector is the direct normal irradiance (DNI). 280 | # 281 | # For a 1-axis parabolic trough, the axis is usually (but not always) oriented north-south, and the rotation 282 | # of the trough follows the sun east-west. The collector captures the same insolation as a horizontal surface. 283 | # A trough with its axis oriented east-west will track the elevation of the sun, but not its azimuth. 284 | # 285 | # North-south oriented axis: The insolation per m2 of collector is FDIR = DNI * cos(zenith). 286 | # East-west oriented axis: The insolation per m2 of collector is DNI * cos(azimuth). 287 | 288 | # ADJUST LAND USE BY LATITUDE!!!!! 289 | # WE DON'T CORRECT EFFICIENCY FOR TEMPERATURES (or wind speed!) see Jacobson. 290 | -------------------------------------------------------------------------------- /src/syntheticdemand_inputdata.jl: -------------------------------------------------------------------------------- 1 | export buildtrainingdata, loadtrainingdata, loaddemanddata, savetrainingdata 2 | 3 | # Can call this with ssp5 or ssp32 (global constants) 4 | # Since not all GADM level 0 regions are assigned to SSP regions, some regions are missed 5 | # (mostly small island nations and the like). This captures 99.85% of global population. 6 | function make_sspregionlookup(ssp) 7 | println("Lookup SSP R5.2 region names for each GADM level 0 region...") 8 | _, _, regionlist, _, _ = loadregions("Global_GADM0") 9 | sspregions = fill("", length(regionlist)) 10 | for (sspregionname, gadmregionnames) in ssp 11 | for gname in gadmregionnames 12 | index = findfirst(isequal(Symbol(gname)), regionlist) 13 | if index == nothing 14 | error("Region $gname is not a GADM level 0 region.") 15 | end 16 | sspregions[index] = sspregionname 17 | end 18 | end 19 | return sspregions # Vector{String}(length numcountries, indexed by gadm country code) 20 | end 21 | 22 | function getnationalpopulation(scenarioyear) 23 | filename = in_datafolder("nationalpopulation_$scenarioyear.jld") 24 | natpop = isfile(filename) ? JLD.load(filename, "natpop") : savenationalpopulation(scenarioyear) 25 | return natpop 26 | end 27 | 28 | # load population dataset and sum globally by country 29 | # Vector{Float64}(length numcountries, indexed by gadm country code) 30 | function savenationalpopulation(scenarioyear) 31 | println("Calculating population in all GADM level 0 regions...") 32 | pop = JLD.load(in_datafolder("population_$scenarioyear.jld"), "population") # persons per pixel 33 | regions, _, regionlist, lonrange, latrange = loadregions("Global_GADM0") 34 | natpop = zeros(length(regionlist)) 35 | for j in latrange 36 | for i in lonrange 37 | reg = regions[i,j] 38 | if reg > 0 39 | natpop[reg] += pop[i,j] 40 | end 41 | end 42 | end 43 | JLD.save(in_datafolder("nationalpopulation_$scenarioyear.jld"), "natpop", natpop, compress=true) 44 | return natpop 45 | end 46 | 47 | # Get current national electricity demand from IEA online statistics (actually "domestic supply"). 48 | # https://www.iea.org/statistics/?country=MOROCCO&year=2016&category=Electricity&indicator=ElecGenByFuel&mode=table&dataTable=ELECTRICITYANDHEAT 49 | function ieademand() 50 | println("Get current national electricity demand from IEA statistics...") 51 | iea = CSV.read(in_datafolder("ieademand_2016.csv"), DataFrame) # GWh/year 52 | _, _, regionlist, _, _ = loadregions("Global_GADM0") 53 | nationaldemand = zeros(length(regionlist)) 54 | for row in eachrow(iea) 55 | country, demand = row 56 | country == uppercase(country) && continue # skip IEA aggregated regions in uppercase, e.g. "ASIA" 57 | f = findfirst(regionlist .== Symbol(country)) 58 | f == nothing && error("IEA country $country missing in GADM regions.") 59 | nationaldemand[f] = demand/1000 60 | end 61 | return nationaldemand # TWh/year 62 | end 63 | 64 | # Lookup electricity demand from a SSP database dataframe. Return a single dataframe row. 65 | ssplookup(ssp, model, scen, region) = 66 | ssp[(ssp[:,:MODEL] .== model) .& (ssp[:,:SCENARIO] .== scen) .& (ssp[:,:REGION] .== region), :][1,:] 67 | 68 | # Calculate regional multiplers for demand from 2016 to target year 69 | function calcdemandmultipliers(sspscenario::String, year::Int) 70 | println("Calculate demand multipliers...") 71 | # First read electricity demand from the SSP database into a dataframe. 72 | ssp = CSV.read(in_datafolder("SSP v2 Final Energy - Electricity.csv"), DataFrame) 73 | 74 | # SSP scenarios also include radiative forcing targets, e.g. SSP2-34 75 | # (only for IAM energy system results, not underlying population & GDP scenario) 76 | # We'll hardcode the 3.4 W/m2 scenario variant for now and make it a configurable option later. 77 | scen = uppercase(sspscenario) # e.g. "SSP2-34" 78 | 79 | # We'll take the average result from IMAGE and MESSAGE models. 80 | demandmult2050 = Dict{String, Float64}() 81 | for sspreg in keys(ssp5) 82 | reg = "R5.2$sspreg" 83 | image = ssplookup(ssp, "IMAGE", scen, reg) 84 | message = ssplookup(ssp, "MESSAGE-GLOBIOM", scen, reg) 85 | # Take the year 2020 to represent 2016, then adjust for this difference. 86 | image_mult = image[Symbol(year)] / image[Symbol(2020)] 87 | message_mult = message[Symbol(year)] / message[Symbol(2020)] 88 | demandmult2050[sspreg] = ((image_mult + message_mult)/2) ^ ((year-2016)/(year-2020)) 89 | end 90 | return demandmult2050 91 | end 92 | 93 | function makeregionaldemanddata(gisregion, sspscenario::String, year::Int) 94 | res = 0.01 # resolution of auxiliary datasets [degrees per pixel] 95 | 96 | scenarioyear = "$(sspscenario[1:4])_$year" 97 | datasetinfo = Dict(:gisregion=>gisregion, :scenarioyear=>scenarioyear, :res=>res) 98 | regions, _, regionlist, _, pop, _, _, _, lonrange, latrange = read_datasets(datasetinfo) # pop unit: people/grid cell 99 | 100 | gdp = JLD.load(in_datafolder("gdp_$(scenarioyear).jld"))["gdp"][lonrange,latrange] # unit: USD(2010)/grid cell, PPP 101 | 102 | gadm0regions, _, gadm0regionlist, _, _ = loadregions("Global_GADM0") 103 | countrycodes = gadm0regions[lonrange,latrange] 104 | 105 | lats = (90-res/2:-res:-90+res/2)[latrange] 106 | cellarea = rastercellarea.(lats, res) 107 | popdens = pop ./ cellarea' 108 | 109 | natpop = getnationalpopulation(scenarioyear) # people/GADMregion (vector, length numGADMcountries) 110 | demandpercapita = ieademand()./natpop * 1e6 # MWh/year/capita (vector, length numGADMcountries) 111 | ssp5region = make_sspregionlookup(ssp5) # SSP 5-region names (vector, length numGADMcountries) 112 | demandmult = calcdemandmultipliers(sspscenario, year) # Dict: SSP region name => multiplier 113 | 114 | numreg = length(regionlist) 115 | regionaldemand = zeros(numreg) 116 | regionalpop = zeros(numreg) 117 | regionalgdp = zeros(numreg) 118 | 119 | println("Calculating annual electricity demand for model regions...") 120 | nlons, nlats = size(regions) 121 | updateprogress = Progress(nlats, 1) 122 | for j = 1:nlats 123 | for i = 1:nlons 124 | reg = regions[i,j] 125 | countrycode = countrycodes[i,j] 126 | if reg > 0 && reg != NOREGION && countrycode > 0 127 | sspreg = ssp5region[countrycode] 128 | if isempty(sspreg) 129 | # error("Oops, no SSP region assigned to region $(gadm0regionlist[countrycode]).") 130 | continue 131 | end 132 | regionaldemand[reg] += demandpercapita[countrycode] * pop[i,j]/1e6 * demandmult[sspreg] # TWh/year 133 | regionalpop[reg] += pop[i,j] # unit: people 134 | regionalgdp[reg] += gdp[i,j] # unit: USD(2010) 135 | end 136 | end 137 | next!(updateprogress) 138 | end 139 | regionaldemandpercapita = regionaldemand ./ regionalpop * 1e6 # MWh/year/capita 140 | regionalgdppercapita = regionalgdp ./ regionalpop # USD(2010)/capita 141 | 142 | return regionlist, regionaldemandpercapita, regionalgdppercapita 143 | end 144 | 145 | function buildtrainingdata(; gisregion="Europe8", sspscenario="ssp2-34", sspyear=2050, era_year=2018, numcenters=3, mindist=3.3) 146 | println("\nBuilding training data for $gisregion...") 147 | regionlist, demandpercapita, gdppercapita = 148 | makeregionaldemanddata(gisregion, sspscenario, sspyear) 149 | scenarioyear = "$(sspscenario[1:4])_$sspyear" 150 | hours, temp_popcenters = GIStemp(gisregion, scenarioyear, era_year, numcenters, mindist) 151 | offsets, zone_maxpop, population = regional_timezone_offsets_Jan1(gisregion=gisregion, scenarioyear=scenarioyear, era_year=era_year) 152 | 153 | numreg, numhours = length(regionlist), length(hours) 154 | firsttime = ZonedDateTime.(hours[1], zone_maxpop) 155 | zonedtime = hcat(collect.([firsttime[i]:Hour(1):firsttime[i]+Hour(8759) for i = 1:numreg])...)[:] 156 | 157 | println("\nShifting hourly temperatures from UTC to local time...") 158 | temperature_top3_mean = dropdims(mean(temp_popcenters, dims=3), dims=3) 159 | quantiles = hcat([quantile(temp_popcenters[:,c,1], [0.05, 0.5, 0.95]) for c = 1:numreg]...) 160 | shifted_temperature_top3_mean = similar(temperature_top3_mean) 161 | shifted_temp1 = similar(temperature_top3_mean) 162 | for r = 1:numreg 163 | shifted_temperature_top3_mean[:,r] = circshift(temperature_top3_mean[:,r], round(Int, offsets[r])) 164 | shifted_temp1[:,r] = circshift(temp_popcenters[:,r,1], round(Int, offsets[r])) 165 | end 166 | 167 | # dataframe with hourly data 168 | df_time = DataFrame( 169 | localtime = DateTime.(zonedtime, Local), 170 | country = repeat(string.(regionlist), inner=numhours), 171 | temp_top3 = shifted_temperature_top3_mean[:], 172 | temp1 = shifted_temp1[:], 173 | localhour = hour.(zonedtime), 174 | month = month.(zonedtime), 175 | weekend01 = Int.(dayofweek.(zonedtime) .>= 6) 176 | ) 177 | 178 | # sort by average monthly temperature (in popcenter 1), store rank in ranked_month 179 | df_monthlytemp = combine(groupby(df_time, [:country, :month]), :temp1 => mean => :temp_monthly) |> 180 | d -> sort!(d, [:country, :temp_monthly]) |> 181 | d -> insertcols!(d, 4, :ranked_month => repeat(1:12, outer=numreg)) 182 | 183 | # dataframe with regional data 184 | df_reg = DataFrame(country=string.(regionlist), demandpercapita=demandpercapita, gdppercapita=gdppercapita, 185 | temp1_qlow=quantiles[1,:], temp1_mean=quantiles[2,:], temp1_qhigh=quantiles[3,:]) 186 | 187 | # join everything together 188 | df = innerjoin(df_time, df_monthlytemp, on=[:country, :month]) |> 189 | d -> innerjoin(d, df_reg, on=:country) 190 | return df, offsets, population 191 | end 192 | 193 | function loadtrainingdata() 194 | filename = in_datafolder("syntheticdemand_trainingdata.csv") 195 | if !isfile(filename) 196 | savetrainingdata() 197 | end 198 | df_train = CSV.read(filename, DataFrame) 199 | offsets = CSV.read(in_datafolder("syntheticdemand_timezoneoffsets.csv"), DataFrame)[:,1] 200 | return df_train, offsets 201 | end 202 | 203 | function savetrainingdata(; numcenters=3, mindist=3.3) 204 | create_scenario_datasets("SSP2", 2020) 205 | println("\nCreating training dataset for synthetic demand...") 206 | println("(This requires ERA5 temperature data for the year 2015 and scenario datasets for SSP2 2020.)") 207 | df_train, offsets, _ = buildtrainingdata(gisregion="SyntheticDemandRegions", 208 | sspscenario="SSP2-34", sspyear=2020, era_year=2015, 209 | numcenters=numcenters, mindist=mindist) 210 | CSV.write(in_datafolder("syntheticdemand_timezoneoffsets.csv"), DataFrame(offsets=offsets)) 211 | CSV.write(in_datafolder("syntheticdemand_trainingdata.csv"), df_train) 212 | end 213 | 214 | loaddemanddata() = CSV.read(in_datafolder("syntheticdemand_demanddata.csv"), DataFrame) 215 | 216 | function regional_timezone_offsets_Jan1(; gisregion="Europe8", scenarioyear="ssp2_2050", era_year=2018) 217 | println("\nCalculating population-weighted regional time zone offsets...") 218 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(gisregion) 219 | tzindices, tznames = loadtimezones(lonrange, latrange) 220 | popscale = 1e5 221 | pop = JLD.load(in_datafolder("population_$scenarioyear.jld"), "population")[lonrange,latrange] ./ popscale # scale down for better precision 222 | numreg = length(regionlist) 223 | numhours = 24*daysinyear(era_year) 224 | offsets = zeros(numreg) 225 | population = zeros(numreg) 226 | zone_maxpop = fill(TimeZone("Europe/London"), numreg) 227 | updateprogress = Progress(numreg, 1) 228 | for r = 1:numreg 229 | reg = (regions .== r) 230 | regindices = tzindices[reg] 231 | regpop = pop[reg] 232 | zoneindices = unique(regindices) 233 | weightedoffset = 0.0 234 | zones = TimeZone[] 235 | pops = Float64[] 236 | for idx in zoneindices 237 | tzname = tznames[idx] 238 | zone = tzname[1:3] == "Etc" ? TimeZone(tzname, TimeZones.Class(:LEGACY)) : TimeZone(tzname) 239 | push!(zones, zone) 240 | firsthour = ZonedDateTime(DateTime(era_year,1,1,0), zone) 241 | hours = firsthour : Hour(1) : firsthour + Hour(numhours-1) 242 | offset = tzoffset(hours[1]) 243 | localpop = sum(regpop[regindices.==idx]) 244 | push!(pops, localpop) 245 | weightedoffset += localpop * offset 246 | end 247 | _, i = findmax(pops) # find time zone with the most population 248 | zone_maxpop[r] = zones[i] 249 | population[r] = sum(pops) * popscale # scale up again 250 | offsets[r] = weightedoffset / sum(pops) 251 | next!(updateprogress) 252 | end 253 | 254 | return offsets, zone_maxpop, population 255 | end 256 | 257 | function saveVilledemand() 258 | vv = CSV.read(in_datafolder("syntheticdemand", "data", "df_model_features.csv"), DataFrame) 259 | hours = DateTime(2015, 1, 1, 0) : Hour(1) : DateTime(2015, 12, 31, 23) 260 | demand1D = hcat(DataFrame(hours=repeat(hours, outer=44)), select(vv, [:country, :demand_total_mwh])) 261 | demand2D = unstack(demand1D, :country, :demand_total_mwh) 262 | rename!(demand2D, Dict("Bosnia and Herz." => "Bosnia and Herzegovina", 263 | "Czech Rep." => "Czech Republic", 264 | "Korea" => "South Korea")) 265 | select!(demand2D, [:hours; sort(names(demand2D)[2:end])]) 266 | 267 | dem2 = stack(demand2D, variable_name=:country, value_name=:demand_MW) 268 | select!(dem2, [3,1,2]) 269 | meandemand = by(dem2, :country, meandemand = :demand_MW => mean) 270 | demand = join(dem2, meandemand, on=:country) 271 | 272 | insertcols!(demand, 5, normdemand=demand[:,:demand_MW]./demand[:,:meandemand]) 273 | CSV.write(in_datafolder("syntheticdemand_demanddata.csv"), demand) 274 | end 275 | 276 | tzoffset(tz::FixedTimeZone) = tz.offset.std.value / 3600 277 | tzoffset(tz::VariableTimeZone) = tzoffset(tz.transitions[end].zone) 278 | tzoffset(dt::ZonedDateTime) = TimeZones.value(dt.zone.offset) / 3600 279 | 280 | function timezonetest() 281 | # zones = TimeZone.(["Europe/London", "Europe/Stockholm", "Europe/Helsinki", "Europe/Minsk"]) 282 | zones = TimeZone.(["Australia/Perth", "Australia/Adelaide", "Australia/Darwin", "Australia/Brisbane", "Australia/Sydney"]) 283 | # all hours in 2015 for each zone in local time 284 | hours = hcat([ZonedDateTime(DateTime(2015,1,1,0), z) : Hour(1) : ZonedDateTime(DateTime(2015,12,31,23), z) for z in zones]...) 285 | # shift hours by the time zone offset to ensure simultaneous rows 286 | # take the offset of ZonedDateTime in hour 1 (in case hour 1 is DST), not the UTC time zone offset 287 | shifted = hcat([circshift(hours[:,i], round(Int, -tzoffset(hours[1,i]))) for i = 1:length(zones)]...) 288 | # convert to UTC and ensure all columns identical (except last 24 hours because of shifted hours from previous year) 289 | utctime = DateTime.(shifted, UTC) 290 | # @assert all([all(utctime[1:end-24,1] .== utctime[1:end-24,i]) for i=2:length(zones)]) # doesn't work for non-integer time offsets 291 | # show the local time in each zone around the shift to summer time (daylight savings) in the EU 292 | localtime = Time.(DateTime.(shifted, Local)) 293 | # display(localtime[2085:2095,:]) # EU 294 | display(localtime[6615:6620,:]) # Australia 295 | return hours, shifted, utctime, localtime 296 | end 297 | -------------------------------------------------------------------------------- /src/GIShydro.jl: -------------------------------------------------------------------------------- 1 | # using CSV, Dates, HDF5, Statistics, Parameters 2 | 3 | hydrooptions() = Dict( 4 | :gisregion => "Europe8", # "Europe8", "Eurasia38", "Scand3" 5 | 6 | :costclasses_min => [ 0, 50, 100], # US $/MWh 7 | :costclasses_max => [50, 100, 9999], 8 | 9 | :storageclasses_min => [ 0, 1e-6, 12], # weeks (discharge time) 10 | :storageclasses_max => [1e-6, 12, 9e9] 11 | ) 12 | 13 | mutable struct HydroOptions 14 | gisregion ::String 15 | costclasses_min ::Vector{Float64} 16 | costclasses_max ::Vector{Float64} 17 | storageclasses_min ::Vector{Float64} 18 | storageclasses_max ::Vector{Float64} 19 | end 20 | 21 | HydroOptions() = HydroOptions("",[],[],[],[]) 22 | 23 | function HydroOptions(d::Dict{Symbol,Any}) 24 | options = HydroOptions() 25 | for (key,val) in d 26 | setproperty!(options, key, val) 27 | end 28 | return options 29 | end 30 | 31 | function getregion(lon, lat, regions, lonrange=1:36000, latrange=1:18000) 32 | res = 0.01 33 | res2 = res/2 34 | lons = (-180+res2:res:180-res2)[lonrange] # longitude values (pixel center) 35 | lats = (90-res2:-res:-90+res2)[latrange] # latitude values (pixel center) 36 | flon = findfirst(round(lon-res2, digits=5) .< lons .<= round(lon+res2, digits=5)) 37 | flat = findfirst(round(lat-res2, digits=5) .< lats .<= round(lat+res2, digits=5)) 38 | (flon == nothing || flat == nothing) && return 0 39 | # println(lons[flon],", ",lats[flat]) 40 | # println(flon, " ", flat) 41 | if regions[flon, flat] > 0 42 | return regions[flon, flat] 43 | else 44 | return maximum(regions[max(1,flon-2):min(length(lons),flon+2), max(1,flat-2):min(length(lats),flat+2)]) 45 | end 46 | end 47 | 48 | 49 | function GIShydro(; optionlist...) 50 | options = HydroOptions(merge(hydrooptions(), optionlist)) 51 | 52 | potential, existing, hydroplants, WECcapacity, WECpotential = readhydrodatabases() 53 | 54 | dayspermonth, nordicprofile, eleccost, capacity, reservoirenergy, dischargetime, monthlyinflow = 55 | potentialhydrovars(potential) 56 | 57 | countries, countrynames, regions, regionlist, lonrange, latrange = read_countries_and_regions(options) 58 | 59 | existingcapac, existinginflowcf = 60 | existinginflow(existing, countries, countrynames, regions, regionlist, lonrange, latrange, hydroplants, WECcapacity, 61 | dayspermonth) 62 | 63 | potentialcapac, potentialinflowcf, potentialmeancost, potentialmeandischargetime = 64 | potentialinflow(options, potential, capacity, regions, regionlist, lonrange, latrange, monthlyinflow, eleccost, 65 | dischargetime, dayspermonth) 66 | 67 | println("\nSaving...") 68 | 69 | @unpack gisregion = options 70 | mkpath(in_datafolder("output")) 71 | matopen(in_datafolder("output", "GISdata_hydro_$gisregion.mat"), "w") do file 72 | write(file, "existingcapac", existingcapac) 73 | write(file, "existinginflowcf", existinginflowcf) 74 | write(file, "potentialcapac", potentialcapac) 75 | write(file, "potentialinflowcf", potentialinflowcf) 76 | write(file, "potentialmeancost", potentialmeancost) 77 | write(file, "potentialmeandischargetime", potentialmeandischargetime) 78 | end 79 | nothing 80 | end 81 | 82 | 83 | function readhydrodatabases() 84 | println("\nReading hydro databases...") 85 | 86 | # lat,lon,COE,Production_GWh,Lake_surface_m2,Lake_volume_m3, 87 | # Qm1,Qm2,Qm3,Qm4,Qm5,Qm6,Qm7,Qm8,Qm9,Qm10,Qm11,Qm12,Qm13,ContID,BasinID,SysID,CapCost 88 | # original headers: 89 | # lat,lon,COE ($/kWh),Production (GWh),Lake surface (m2),Lake volume (m3), 90 | # Qm1,Qm2,Qm3,Qm4,Qm5,Qm6,Qm7,Qm8,Qm9,Qm10,Qm11,Qm12,Qm13,ContID,BasinID, 91 | # SysID (1=DiversionalCanalPower/2=RiverPower), CapCost ($/kW) 92 | potential = DataFrame(CSV.File(in_datafolder("Hydro database (Gernaat) - potential.csv"))) 93 | 94 | # GrandID,lat,lon,Production_kWh,m1,m2,m3,m4,m5,m6,m7,m8,m9,m10,m11,m12,m13 95 | # original headers: 96 | # GrandID,lat,lon,Production_GWh,m1,m2,m3,m4,m5,m6,m7,m8,m9,m10,m11,m12,m13 97 | existing = DataFrame(CSV.File(in_datafolder("Hydro database (Gernaat) - existing (GRanD).csv"))) 98 | 99 | # country,country_long,name,gppd_idnr,capacity_mw,latitude,longitude,fuel1,fuel2,fuel3,fuel4,commissioning_year,owner,source,url,geolocation_source,year_of_capacity_data,generation_gwh_2013,generation_gwh_2014,generation_gwh_2015,generation_gwh_2016,estimated_generation_gwh 100 | elecplants = DataFrame(CSV.File(in_datafolder("WRI - Global Power Plant Database v1.10", "global_power_plant_database.csv")), copycols=true) 101 | 102 | # clean up wrong coordinates 103 | wrong = findall((elecplants.fuel1 .== "Hydro") .& ( 104 | (elecplants.latitude.>90) .| (elecplants.latitude.<-90) .| (elecplants.longitude.>180) .| (elecplants.longitude.<-180) 105 | )) 106 | # elecplants.url[wrong] # 8 results, check urls online 107 | # 1: DDMMSS in lat and lon 108 | # 2-8: lat & lon flipped 109 | w = wrong[1] 110 | elecplants.latitude[w], elecplants.longitude[w] = dms2deg(elecplants.latitude[w]), dms2deg(elecplants.longitude[w]) 111 | w = wrong[2:8] 112 | elecplants.latitude[w], elecplants.longitude[w] = elecplants.longitude[w], elecplants.latitude[w] 113 | wrong = findall((elecplants.fuel1 .== "Hydro") .& ( 114 | (elecplants.latitude.>90) .| (elecplants.latitude.<-90) .| (elecplants.longitude.>180) .| (elecplants.longitude.<-180) 115 | )) 116 | if !isempty(wrong) 117 | error("Cleanup of power plant database coordinates failed.") 118 | end 119 | 120 | # capacity_mw,latitude,longitude 121 | hydroplants = filter(r -> !ismissing(r.fuel1) && r.fuel1 == "Hydro", elecplants)[:,5:7] 122 | 123 | # Country, Capacity_MW, Pumped_MW, Other_MW, Generation_GWh, Generation_BP_GWh % [0 = no data] 124 | # original headers: 125 | # Country, Total Hydropower Capacity (MW) in 2015, Pumped Storage Capacity (MW) in 2015, Excluding Pumped Storage (MW) in 2015, Estimated Net Hydropower Generation (GWh) in 2015, Consumption of Hydroelectricity (GWh) in 2015 (BP 2016) 126 | WECcapacity = CSV.File(in_datafolder("WEC hydro capacity 2015.csv")) |> DataFrame 127 | 128 | # Country, Undeveloped_GWh, Potential_GWh, Utilisation 129 | # original headers: 130 | # Country, Undeveloped (GWh/year), Total Potential (GWh/year), Current Utilisation (%) 131 | WECpotential = CSV.File(in_datafolder("WEC hydro potentials.csv")) |> DataFrame 132 | 133 | return potential, existing, hydroplants, WECcapacity, WECpotential 134 | end 135 | 136 | 137 | function potentialhydrovars(potential) 138 | dayspermonth = [Dates.daysinmonth(Date("2018-$m")) for m = 1:12] 139 | 140 | # No data on Nordic sites (hydro data limited to 60 degrees north), so take Nordic monthly inflow from a European model instead. 141 | nordicprofile = [2224, 1746, 2032, 4089, 15163, 23469, 16126, 9740, 9026, 9061, 5438, 3544] 142 | 143 | production = potential[:, :Production_GWh] # GWh 144 | profile = Matrix(potential[:, 7:18]) # m3/s (columns Qm1-Qm12) 145 | eleccost = potential[:, :COE] # $/kWh 146 | capcost = potential[:, :CapCost] # $/kW 147 | type = potential[:, :SysID] 148 | reservoirsize = potential[:, :Lake_volume_m3] / 1e6 # Mm3 149 | 150 | cfriver = mean(profile, dims=2) ./ maximum(profile, dims=2) 151 | crf = CRF(0.1, 40) # Gernaat assumes 10% discount rate and 40 year lifetime 152 | cf = capcost * crf/8760 ./ eleccost 153 | effic = 0.9*(type .== 1) .+ 0.7*(type .== 2) 154 | capacity = production ./ cf * 1000/8760 # MW 155 | sortedflow = sort(profile, dims=2) # m3/s 156 | designflow = sortedflow[:, 9] # m3/s 157 | waterdensity = 997 # kg/m3 158 | grav = 9.81 # m/s2 159 | 160 | # My estimate of fallheight using eq 2-3 in the Gernaat paper. 161 | # Maybe ask for a new version of the database with this field included. 162 | fallheight = production*1e9/8760 ./ cf ./ effic ./ (designflow*waterdensity*grav) # m 163 | 164 | # remove outliers 165 | f = findall(fallheight .> 2000) 166 | fallheight[f] .= 2000 167 | production[f] = fallheight[f] .* cf[f] .* effic[f] .* designflow[f] * waterdensity*grav*8760/1e9 168 | capacity[f] = production[f] ./ cf[f] * 1000/8760 169 | eleccost[f] = eleccost[f] .* potential[f, :Production_GWh] ./ production[f] 170 | capcost[f] = capcost[f] .* potential[f, :Production_GWh] ./ production[f] 171 | 172 | # J = kg m2/s2 173 | # m * m3/s * kg/m3 * m/s2 = J/s = W 174 | energyprofile = fallheight * dayspermonth' * 24/1e9 .* profile * waterdensity*grav # GWh/month 175 | waterenergy = sum(energyprofile, dims=2) # GWh/year 176 | 177 | # m * Mm3 * kg/m3 * m/s2 = MJ 178 | reservoirenergy = fallheight .* reservoirsize * waterdensity*grav / 3600 / 1000 # GWh 179 | dischargetime = reservoirenergy ./ capacity * 1000 # h 180 | # Matlab plots: figure; hist(log10(reservoirenergy(reservoirenergy>0)),1000) 181 | # hist(log10(dischargetime(reservoirenergy>0)),1000) 182 | 183 | # Try calculating a monthly cf based on water inflow and turbine size. 184 | # Maybe not always accurate for large reservoirs since they can store between months, 185 | # but should work for run-of-river. Annual production and CF should still be fine. 186 | # (But why is my production estimate sometimes much higher than Gernaat's?) 187 | monthlyinflow = energyprofile .* effic 188 | monthlyprod = min.(monthlyinflow, capacity * dayspermonth' * 24/1000) # GWh/month 189 | 190 | return dayspermonth, nordicprofile, eleccost, capacity, reservoirenergy, dischargetime, monthlyinflow 191 | end 192 | 193 | 194 | function read_countries_and_regions(options) 195 | println("Reading Global GADM country-level database...") 196 | countries, _, countrynames, _, _ = loadregions("Global_GADM0") 197 | numcountries = length(countrynames) 198 | 199 | @unpack gisregion = options 200 | println("Reading regions for $gisregion...") 201 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(gisregion) 202 | # bbox = [-90 -180; 90 180] 203 | 204 | return countries, countrynames, regions, regionlist, lonrange, latrange 205 | end 206 | 207 | 208 | function existinginflow(existing, countries, countrynames, regions, regionlist, lonrange, latrange, hydroplants, WECcapacity, 209 | dayspermonth) 210 | println("\nCalculate monthly inflow for existing hydro..."); 211 | 212 | existingprofile = Matrix(existing[:, 5:16]) # m3/s (columns m1-m12) 213 | 214 | numcountries = length(countrynames) 215 | capacwri = zeros(numcountries,1) # GW 216 | annualcf = zeros(numcountries,1) 217 | scalefactor_capacwri = zeros(numcountries,1) 218 | 219 | for i = 1:size(hydroplants,1) 220 | country = getregion(hydroplants[i,:longitude], hydroplants[i,:latitude], countries) 221 | i == 4867 && continue # weird data point in the Indian Ocean 222 | if country > 0 && country != NOREGION 223 | capacwri[country] += hydroplants[i,:capacity_mw] / 1e3 # GW 224 | else 225 | println("Can't identify country: $(hydroplants[i,:])") 226 | end 227 | end 228 | 229 | # calculate scale factor for national hydro capacity (WRI capac/WEC capac) 230 | # calculate annual cf for each country from WEC data 231 | for i = 1:size(WECcapacity,1)-1 # last row is "World" 232 | cty = findfirst(countrynames .== Symbol(WECcapacity[i,:Country])) 233 | cty == nothing && error("Can't find country name: $(Symbol(WECcapacity[i,:Country]))") 234 | scalefactor_capacwri[cty] = capacwri[cty] / WECcapacity[i,:Capacity_MW] * 1000 235 | annualcf[cty] = WECcapacity[i,:Generation_GWh] / WECcapacity[i,:Capacity_MW] * 1000/8760 236 | end 237 | 238 | numreg = length(regionlist) 239 | existingcapac = zeros(numreg,1) # GW 240 | existinginflow = zeros(numreg,12) # GWh/month 241 | 242 | # calculate annual generation at each site using capacity and scale factor & CF from above 243 | # then get monthly inflow profile from nearest site in Gernaat/Grand 244 | # finally calculate monthly inflow for existing sites 245 | for i = 1:size(hydroplants,1) 246 | cty = getregion(hydroplants[i,:longitude], hydroplants[i,:latitude], countries) 247 | reg = getregion(hydroplants[i,:longitude], hydroplants[i,:latitude], regions, lonrange, latrange) 248 | (reg == 0 || reg == NOREGION || cty == 0 || cty == NOREGION) && continue 249 | scalefactor = scalefactor_capacwri[cty] > 0 ? scalefactor_capacwri[cty] : 1.0 250 | existingcapacity = hydroplants[i,:capacity_mw] / 1e3 / scalefactor # GW 251 | annualgeneration = existingcapacity * annualcf[cty] * 8760 # GWh/year 252 | existingcapac[reg] += existingcapacity 253 | 254 | # approx distance in km to all existing plants in Gernaat/Grand 255 | dist = 111*sqrt.((existing[:,:lon] .- hydroplants[i,:longitude]).^2 + (existing[:,:lat] .- hydroplants[i,:latitude]).^2) 256 | d, ndx = findmin(dist) 257 | if regionlist[reg] == "NOR" 258 | inflowprofile = nordicprofile 259 | else 260 | inflowprofile = existingprofile[ndx,:] 261 | end 262 | monthlyprofile = inflowprofile .* dayspermonth 263 | monthlyprofile = monthlyprofile / sum(monthlyprofile) 264 | 265 | monthlygeneration = annualgeneration * monthlyprofile # GWh/month 266 | existinginflow[reg,:] += monthlygeneration 267 | end 268 | 269 | existinginflowcf = existinginflow ./ existingcapac ./ dayspermonth' / 24 270 | 271 | return existingcapac, existinginflowcf 272 | end 273 | 274 | 275 | function potentialinflow(options, potential, capacity, regions, regionlist, lonrange, latrange, monthlyinflow, eleccost, 276 | dischargetime, dayspermonth) 277 | println("Calculate monthly inflow for potential hydro...") 278 | 279 | @unpack costclasses_min, costclasses_max, storageclasses_min, storageclasses_max, gisregion = options 280 | 281 | ncostclasses = length(costclasses_min) 282 | nstorageclasses = length(storageclasses_min) 283 | 284 | numreg = length(regionlist) 285 | potentialcapac = zeros(numreg,ncostclasses,nstorageclasses) # GW 286 | potentialinflow = zeros(numreg,ncostclasses,nstorageclasses,12) # GWh/month 287 | 288 | potentialmeancost = zeros(numreg,ncostclasses,nstorageclasses) # $/kWh 289 | potentialmeandischargetime = zeros(numreg,ncostclasses,nstorageclasses) # hours 290 | nobservations = zeros(numreg,ncostclasses,nstorageclasses) 291 | 292 | for i = 1:size(potential,1) 293 | reg = getregion(potential[i,:lon], potential[i,:lat], regions, lonrange, latrange) 294 | (reg == 0 || reg == NOREGION) && continue 295 | 296 | weeks = dischargetime[i] / 168 297 | storageclass = findfirst((weeks .>= storageclasses_min) .& (weeks .< storageclasses_max)) 298 | cost = eleccost[i]*1000 # US $/MWh 299 | costclass = findfirst((cost .>= costclasses_min) .& (cost .< costclasses_max)) 300 | 301 | if regionlist[reg] == "NOR" 302 | inflowprofile = nordicprofile/sum(nordicprofile) * sum(monthlyinflow[i,:]) 303 | else 304 | inflowprofile = monthlyinflow[i,:] 305 | end 306 | 307 | potentialcapac[reg,costclass,storageclass] += capacity[i]/1000 308 | potentialinflow[reg,costclass,storageclass,:] += inflowprofile 309 | 310 | potentialmeancost[reg,costclass,storageclass] += eleccost[i] 311 | potentialmeandischargetime[reg,costclass,storageclass] += min(8760, dischargetime[i]) 312 | nobservations[reg,costclass,storageclass] += 1 313 | end 314 | 315 | potentialinflowcf = potentialinflow ./ potentialcapac ./ reshape(dayspermonth, (1,1,1,12)) / 24 316 | potentialmeancost = potentialmeancost ./ nobservations 317 | potentialmeandischargetime = potentialmeandischargetime ./ nobservations 318 | 319 | return potentialcapac, potentialinflowcf, potentialmeancost, potentialmeandischargetime 320 | end 321 | 322 | # function mapurl(latlon1,latlon2) 323 | # url = "https://www.google.com/maps/dir/?api=1&origin=$(latlon1[1]),$(latlon1[2])" * 324 | # "&destination=$(latlon2[1]),$(latlon2[2])&travelmode=walking" 325 | # end 326 | -------------------------------------------------------------------------------- /src/helperfunctions.jl: -------------------------------------------------------------------------------- 1 | # bbox is a "bounding box" representing outer limits of latitude & longitude: [bottomlat leftlon; toplat rightlon] 2 | # rasterdensity is number of pixels per degree, so rasterdensity = 1/res with res = raster resolution measured in degrees 3 | # TO DO: make these consistent with lon, lat convention in rest of code, use resolution instead of rasterdensity 4 | function bbox2ranges(bbox, rasterdensity) 5 | latindexes, lonindexes = bbox2indexes(bbox, rasterdensity) 6 | latindex = latindexes[1] : latindexes[2] 7 | lonindex = lonindexes[1] : lonindexes[2] 8 | return latindex, lonindex 9 | end 10 | 11 | function bbox2indexes(bbox, rasterdensity) 12 | latindexes = round.(Int, reverse(rasterdensity*(90 .- bbox[:,1]), dims=1) + [1 0]') 13 | lonindexes = round.(Int, rasterdensity*(bbox[:,2] .- (-180)) + [1 0]') 14 | return latindexes, lonindexes 15 | end 16 | 17 | function roundbbox(bbox, rasterdensity) 18 | newbbox = zeros(2,2) 19 | newbbox[1,:] = floor.(bbox[1,:]*rasterdensity)/rasterdensity 20 | newbbox[2,:] = ceil.(bbox[2,:]*rasterdensity)/rasterdensity 21 | return newbbox 22 | end 23 | 24 | filterrange(range, lowhigh) = range[(range .>= lowhigh[1]) .& (range .<= lowhigh[2])] 25 | 26 | function getlats(bbox, rasterdensity, halfshift) 27 | bottomlat, toplat = bbox[:,1] 28 | res = 1/rasterdensity 29 | shift = halfshift ? res/2 : 0.0 30 | return bottomlat + shift : res : toplat - shift 31 | # return toplat - shift : -res : bottomlat + shift 32 | end 33 | 34 | function getlons(bbox, rasterdensity, halfshift) 35 | leftlon, rightlon = bbox[:,2] 36 | res = 1/rasterdensity 37 | shift = halfshift ? res/2 : 0.0 38 | return leftlon + shift : res : rightlon - shift 39 | end 40 | 41 | # Boolean function, true for a leap year (for Gregorian calendar, assumes year >= 1583). 42 | # The Gregorian calendar has 97 leap years every 400 years: 43 | # Every year divisible by 4 is a leap year. 44 | # However, every year divisible by 100 is not a leap year. 45 | # However, every year divisible by 400 is a leap year after all. 46 | # So, 1700, 1800, 1900, 2100, and 2200 are not leap years, but 1600, 2000, and 2400 are leap years. 47 | leapyear(year) = divisible(year,4) && (!divisible(year,100) || divisible(year,400)) 48 | divisible(x,y) = x % y == 0 49 | 50 | 51 | function meshgrid2d(x,y) 52 | xx = repeat(x', length(y), 1) 53 | yy = repeat(y, 1, length(x)) 54 | return xx, yy 55 | end 56 | 57 | # Borrowed from Matlab, see fspecial('disk', rad) 58 | function diskfilterkernel(rad) 59 | crad = ceil(Int, rad-0.5) 60 | x,y = meshgrid2d(-crad:crad,-crad:crad) 61 | maxxy = max.(abs.(x),abs.(y)) 62 | minxy = min.(abs.(x),abs.(y)) 63 | m1 = (rad^2 .< (maxxy .+ 0.5).^2 + (minxy .- 0.5).^2) .* (minxy .- 0.5) + 64 | (rad^2 .>= (maxxy .+ 0.5).^2 + (minxy .- 0.5).^2) .* sqrt.(max.(0, rad^2 .- (maxxy .+ 0.5).^2)) 65 | m2 = (rad^2 .> (maxxy .- 0.5).^2 + (minxy .+ 0.5).^2) .* (minxy .+ 0.5) + 66 | (rad^2 .<= (maxxy .- 0.5).^2 + (minxy .+ 0.5).^2) .* sqrt.(max.(rad^2 .- (maxxy .- 0.5).^2)) 67 | sgrid = (rad^2 * (0.5*(asin.(m2/rad) .- asin.(m1/rad)) + 0.25*(sin.(2*asin.(m2/rad)) - sin.(2*asin.(m1/rad)))) + 68 | - (maxxy .- 0.5).*(m2-m1) + (m1 .- minxy .+ 0.5)) .* 69 | ((((rad^2 .< (maxxy .+ 0.5).^2 + (minxy .+ 0.5).^2) .& 70 | (rad^2 .> (maxxy .- 0.5).^2 + (minxy .- 0.5).^2)) .| 71 | ((minxy .== 0) .& (maxxy .- 0.5 .< rad) .& (maxxy .+ 0.5 .>= rad)))) 72 | sgrid = sgrid .+ ((maxxy .+ 0.5).^2 + (minxy .+ 0.5).^2 .< rad^2) 73 | sgrid[crad+1,crad+1] = min.(pi*rad^2, pi/2) 74 | if ((crad>0) && (rad > crad-0.5) && (rad^2 < (crad-0.5)^2+0.25)) 75 | m1a = sqrt(rad^2 - (crad - 0.5).^2) 76 | m1n = m1a/rad 77 | sg0 = 2*(rad^2*(0.5*asin(m1n) + 0.25*sin(2*asin(m1n)))-m1a*(crad-0.5)) 78 | sgrid[2*crad+1,crad+1] = sg0 79 | sgrid[crad+1,2*crad+1] = sg0 80 | sgrid[crad+1,1] = sg0 81 | sgrid[1,crad+1] = sg0 82 | sgrid[2*crad,crad+1] = sgrid[2*crad,crad+1] - sg0 83 | sgrid[crad+1,2*crad] = sgrid[crad+1,2*crad] - sg0 84 | sgrid[crad+1,2] = sgrid[crad+1,2] - sg0 85 | sgrid[2,crad+1] = sgrid[2,crad+1] - sg0 86 | end 87 | sgrid[crad+1,crad+1] = min(sgrid[crad+1,crad+1], 1) 88 | h = centered(sgrid/sum(sgrid)) 89 | end 90 | 91 | 92 | meandrop(x; dims=dims) = dropdims(mean(x, dims=dims), dims=dims) 93 | sumdrop(x; dims=dims) = dropdims(sum(x, dims=dims), dims=dims) 94 | 95 | shifthalfcell(data, kernel=centered([.25 .25; .25 .25])) = imfilter(data, kernel) 96 | 97 | function shiftallcells!(windCF) 98 | yearlength = size(windCF, 1) 99 | updateprogress = Progress(yearlength, 1) 100 | kernel = centered([.25 .25; .25 .25]) 101 | for i=1:yearlength 102 | windCF[i,:,:] = shifthalfcell(windCF[i,:,:], kernel) 103 | next!(updateprogress) 104 | end 105 | end 106 | 107 | function max!(x::AbstractArray, val) 108 | @inbounds for i in eachindex(x) 109 | x[i] = max(x[i], val) 110 | end 111 | x 112 | end 113 | 114 | selfmap!(f,x) = map!(f,x,x) 115 | 116 | # Apply a function fn to the data matrix by chunks to avoid memory issues 117 | function gridsplit(data::AbstractArray, fn::Function, resulttype::DataType; nmax=9000, overlap=250) 118 | rows, cols = size(data) 119 | nparts = length(1:nmax:rows) * length(1:nmax:cols) 120 | nparts > 1 && println("\nSplitting data matrix into $nparts parts to avoid memory issues...") 121 | result = zeros(resulttype, size(data)) 122 | part = 0 123 | for r = 1:nmax:rows, c = 1:nmax:cols 124 | part += 1 125 | nparts > 1 && println("\nPart $part/$nparts:") 126 | rowrange_in = max(r-overlap, 1):min(r-1+nmax+overlap, rows) 127 | colrange_in = max(c-overlap, 1):min(c-1+nmax+overlap, cols) 128 | rowrange_out = r:min(r-1+nmax, rows) 129 | colrange_out = c:min(c-1+nmax, cols) 130 | rowdelta = rowrange_out[1] - rowrange_in[1] + 1 131 | coldelta = colrange_out[1] - colrange_in[1] + 1 132 | rowrange_delta = rowdelta:(rowdelta + length(rowrange_out) - 1) 133 | colrange_delta = coldelta:(coldelta + length(colrange_out) - 1) 134 | result[rowrange_out,colrange_out] = fn(data[rowrange_in,colrange_in])[rowrange_delta,colrange_delta] 135 | end 136 | return result 137 | end 138 | 139 | # function gridsurface(x, y; color=color, nmax=9000, args...) 140 | # rows, cols = size(color) 141 | # rowchunks, colchunks = ceil(Int, rows/nmax), ceil(Int, cols/nmax) 142 | # rowsplit, colsplit = ceil(Int, rows/rowchunks), ceil(Int, cols/colchunks) 143 | # nparts = rowchunks * colchunks 144 | # nparts > 1 && println("\nSplitting surface plat into $nparts parts to avoid Makie GPU limits...") 145 | # part = 0 146 | # scene = Scene() 147 | # for r = 1:rowsplit:rows, c = 1:colsplit:cols 148 | # part += 1 149 | # nparts > 1 && println("\nPart $part/$nparts:") 150 | # rowrange = r:min(r-1+rowsplit, rows) 151 | # colrange = c:min(c-1+colsplit, cols) 152 | # surface!(x[rowrange,colrange], y[rowrange,colrange]; color=color[rowrange,colrange], args...) 153 | # end 154 | # return scene 155 | # end 156 | 157 | row2lon(row::Int, res) = (row - 0.5) * res - 180 158 | col2lat(col::Int, res) = 90 - (col - 0.5) * res 159 | lon2row(lon::Real, res) = floor(Int, mod(180+lon, 360) / res) + 1 160 | lat2col(lat::Real, res) = floor(Int, min(180-res/2, 90-lat) / res) + 1 161 | 162 | function rowcol2lonlat(rowcol::Tuple{Int,Int}, res) 163 | row, col = rowcol 164 | @assert 1 <= row <= 360/res 165 | @assert 1 <= col <= 180/res 166 | return row2lon(row,res), col2lat(col,res) 167 | end 168 | 169 | function lonlat2rowcol(lonlat::Tuple{<:Real,<:Real}, res) 170 | lon, lat = lonlat 171 | @assert -90 <= lat <= 90 172 | return lon2row(lon,res), lat2col(lat, res) 173 | end 174 | 175 | # convert indexes of datasets with 0.01 degree resolution to indexes of ERA5 resolution (0.28125 degrees) 176 | function eraranges(lonrange, latrange, res, erares) 177 | topleft = rowcol2lonlat((lonrange[1],latrange[1]), res) 178 | bottomright = rowcol2lonlat((lonrange[end],latrange[end]), res) 179 | row1, col1 = lonlat2rowcol(topleft, erares) 180 | row2, col2 = lonlat2rowcol(bottomright, erares) 181 | nlons = round(Int, 360/erares) 182 | eralonranges = row2 >= row1 ? [row1:row2] : [row1:nlons, 1:row2] 183 | eralatrange = col1:col2 184 | return eralonranges, eralatrange 185 | end 186 | 187 | function eraindexlookup(lons, lats, eralonranges, eralatrange) 188 | eralonrange = length(eralonranges) == 1 ? eralonranges[1] : [eralonranges[1]; eralonranges[2]] 189 | lonindexlookup = zeros(Int,1280) 190 | latindexlookup = zeros(Int,640) 191 | for (i,r) in enumerate(eralonrange) 192 | lonindexlookup[r] = i 193 | end 194 | for (i,r) in enumerate(eralatrange) 195 | latindexlookup[r] = i 196 | end 197 | eralonindexes = lonindexlookup[lon2row.(lons,0.28125)] 198 | eralatindexes = latindexlookup[lat2col.(lats,0.28125)] 199 | return eralonindexes, eralatindexes 200 | end 201 | 202 | # Automatically detect the "bounding box" of nonzero data in a matrix. 203 | # Returns a tuple of indexes of the box containing data, (lon,lat) or (row,col). 204 | function getbboxranges(regions::AbstractMatrix, padding::Int=0) 205 | data = (regions .> 0) .& (regions .!= NOREGION) 206 | lonrange = dataindexes_lon(vec(any(data, dims=2)), padding) # all longitudes with region data 207 | latrange = dataindexes_lat(vec(any(data, dims=1)), padding) # all latitudes with region data 208 | return lonrange, latrange 209 | end 210 | 211 | # Given a vector of Bool indicating nonzero data along latitudes, return a range 212 | # of indexes indicating the area where there is data. 213 | # Optionally add some padding elements to the data area (to ensure that there is 214 | # some offshore territory around the regions). 215 | function dataindexes_lat(latdata::AbstractVector, padding::Int=0) 216 | len = length(latdata) 217 | first, last = extrema(findall(latdata)) # first & last indexes of the data region 218 | return max(1, first-padding):min(len, last+padding) # lat indexes of region elements 219 | end 220 | 221 | # Given a vector of Bool indicating nonzero data along longitudes, return a vector 222 | # of indexes indicating the area where there is data. Do this by eliminating the 223 | # longest contiguous sequence of zero data, considering wraparound (at lon=180). 224 | # Optionally add some padding elements to the data area (to ensure that there is 225 | # some offshore territory around the regions). 226 | function dataindexes_lon(londata::AbstractVector, padding::Int=0) 227 | len = length(londata) 228 | seq = longest_circular_sequence(londata, false) # first & last indexes of the longest contiguous sequence of NONregion elements 229 | first, last = seq[2]+1, seq[1]-1 # the rest are region elements (including "holes" in the sequence) 230 | last = last >= first ? last : last + len # make sure last > first so the loop below works 231 | return [mod1(i,len) for i = first-padding:last+padding] # lon indexes of region elements 232 | end 233 | 234 | # Return the start and end indexes of the longest contiguous sequence of v such that v[i] == x, 235 | # allowing the sequence to wrap around the end. 236 | function longest_circular_sequence(v::AbstractVector, x) 237 | longest = Int[] 238 | current = Int[] 239 | haswrapped = false 240 | i = 1 241 | len = length(v) 242 | sequencelength(seq) = isempty(seq) ? 0 : mod(seq[2]-seq[1], len) + 1 243 | while true 244 | if v[i] == x 245 | if isempty(current) 246 | current = [i,i] 247 | else 248 | current[2] = i 249 | end 250 | else 251 | longest = sequencelength(current) > sequencelength(longest) ? current : longest 252 | haswrapped && return longest 253 | current = Int[] 254 | end 255 | if !haswrapped && i==len 256 | haswrapped = true 257 | i = 1 258 | sequencelength(current) == len && return current 259 | else 260 | i += 1 261 | end 262 | end 263 | end 264 | 265 | function coordmap(len::Int, croprange) 266 | coords = zeros(Int, len) 267 | indexes = (1:len)[croprange] 268 | for (i,n) = enumerate(indexes) 269 | coords[n] = i 270 | end 271 | return coords 272 | end 273 | 274 | # area of a grid cell in km2 for a given latitude (in degrees) and raster resolution 275 | rastercellarea(lat, res) = cosd(lat) * (2*6371*π/(360/res))^2 276 | 277 | function eralonlat(options, lonrange, latrange) 278 | @unpack res, erares = options 279 | 280 | lons = (-180+res/2:res:180-res/2)[lonrange] # longitude values (pixel center) 281 | lats = (90-res/2:-res:-90+res/2)[latrange] # latitude values (pixel center) 282 | cellarea = rastercellarea.(lats, res) 283 | 284 | eralonranges, eralatrange = eraranges(lonrange, latrange, res, erares) 285 | eralonrange = length(eralonranges) == 1 ? eralonranges[1] : [eralonranges[1]; eralonranges[2]] 286 | eralons = (-180+erares/2:erares:180-erares/2)[eralonrange] # longitude values (pixel center) 287 | eralats = (90-erares/2:-erares:-90+erares/2)[eralatrange] # latitude values (pixel center) 288 | 289 | lonmap = coordmap(round(Int, 360/res), lonrange) # map full longitude indexes to cropped indexes 290 | latmap = coordmap(round(Int, 180/res), latrange) # ditto latitude 291 | 292 | return eralons, eralats, lonmap, latmap, cellarea 293 | end 294 | 295 | function uncrop(croppedarray, lonrange, latrange, res) 296 | nlon, nlat = round(Int, 360/res), round(Int, 180/res) 297 | full = zeros(eltype(croppedarray), nlon, nlat) 298 | full[lonrange, latrange] = croppedarray 299 | return full 300 | end 301 | 302 | function drawmap(mapdata; scalefactor=(1.0, 1.0), save="", opt...) 303 | skip = ceil(Int, maximum(size(mapdata))/3000) 304 | mirrormap = reverse(mapdata[1:skip:end,1:skip:end], dims=2) 305 | # display(heatmap(mirrormap, size=(1200, 900), c=[cgrad(:viridis)[x] for x in 0.0:0.2:1.0])) 306 | # display(heatmap(mirrormap, size=(1200, 900), c=cgrad(:viridis, [0, 0.5, 1]))) 307 | 308 | # https://discourse.julialang.org/t/how-can-i-save-a-makie-heatmap-as-png-with-one-pixel-per-matrix-element/70579 309 | scene = Scene(camera=campixel!, resolution=size(mirrormap)) 310 | heatmap!(scene, mirrormap; opt...) 311 | scale!(scene, scalefactor...) 312 | !isempty(save) && Makie.save(save, scene, resolution=size(mirrormap).*scalefactor) 313 | return scene 314 | # heatmap(mirrormap; opt...) 315 | # display(countmap(mirrormap[:])) 316 | end 317 | 318 | # I can't believe Makie can't do this more conveniently. 319 | function plotlines(x,y) 320 | scene = Scene() 321 | for i = 1:size(y,2) 322 | lines!(scene, x, y[:,i], color=ColorSchemes.Set1_9[i]) 323 | end 324 | scene 325 | end 326 | 327 | function drawregionmap(regionname) 328 | plotly() 329 | regions, offshoreregions, regionlist, lonrange, latrange = loadregions(regionname) 330 | skip = ceil(Int, maximum(size(mapdata))/3000) 331 | reg = reverse(regions[1:skip:end,1:skip:end]', dims=1) 332 | display(heatmap(reg .+ (reg.>0).*20, size=(1200, 900))) 333 | reg = reverse(offshoreregions[1:4:end,1:4:end]', dims=1) 334 | display(heatmap(reg .+ (reg.>0).*20, size=(1200, 900))) 335 | end 336 | 337 | function resize_categorical(regions, regionlist, lonrange, latrange, erares=0.28125; skipNOREGION=false) 338 | res = 0.01 # resolution of auxiliary datasets [degrees per pixel] 339 | # erares = 0.28125 # resolution of ERA5 datasets [degrees per pixel] 340 | 341 | eralons, eralats, lonmap, latmap, cellarea = eralonlat(Dict(:res=>res, :erares=>erares), lonrange, latrange) 342 | numreg = length(regionlist) 343 | nlons, nlats = length(eralons), length(eralats) 344 | 345 | RegType = eltype(regions) 346 | smallregions = zeros(RegType, nlons, nlats) 347 | count0 = Dict(i => 0 for i = 0:numreg) 348 | count0[NOREGION] = 0 349 | 350 | for (j, eralat) in enumerate(eralats) 351 | colrange = latmap[lat2col(eralat+erares/2, res):lat2col(eralat-erares/2, res)-1] 352 | for (i, eralon) in enumerate(eralons) 353 | # get all high resolution row and column indexes within this ERA5 cell 354 | rowrange = lonmap[lon2row(eralon-erares/2, res):lon2row(eralon+erares/2, res)-1] 355 | count = copy(count0) 356 | foundnonzero = false 357 | for c in colrange, r in rowrange 358 | (c == 0 || r == 0) && continue 359 | reg = regions[r,c] 360 | skipNOREGION && (reg == 0 || reg == NOREGION) && continue 361 | count[reg] += 1 362 | foundnonzero = true 363 | end 364 | smallregions[i,j] = foundnonzero ? findmax(count)[2] : 0 365 | end 366 | end 367 | 368 | return smallregions 369 | end 370 | 371 | function matlab2elin(; gisregion="Europe8", year=2018) 372 | filenamesuffix = "" 373 | _, _, regionlist, _, _ = loadregions(gisregion) 374 | 375 | # CF_pvrooftop, capacity_pvrooftop 376 | region = string.(regionlist) 377 | tech = ["PVplantA", "PVplantB", "PVroof", "CSPA", "CSPB", "WindonshoreA", "WindonshoreB", "Windoffshore"] 378 | classname = ["PVPA", "PVPB", "PVR", "CSPA", "CSPB", "WONA", "WONB", "WOFF"] 379 | capvar = ["capacity_pvplantA", "capacity_pvplantB", "capacity_pvrooftop", "capacity_cspplantA", "capacity_cspplantB", 380 | "capacity_onshoreA", "capacity_onshoreB", "capacity_offshore"] 381 | cfvar = ["CFtime_pvplantA", "CFtime_pvplantB", "CFtime_pvrooftop", "CFtime_cspplantA", "CFtime_cspplantB", 382 | "CFtime_windonshoreA", "CFtime_windonshoreB", "CFtime_windoffshore"] 383 | 384 | winddata = matread(in_datafolder("output", "GISdata_wind$(year)_$gisregion$filenamesuffix.mat")) 385 | solardata = matread(in_datafolder("output", "GISdata_solar$(year)_$gisregion$filenamesuffix.mat")) 386 | data = merge(winddata, solardata) 387 | 388 | # read number of classes from wind & solar GIS output 389 | nwindclasses = [size(winddata[varname], 2) for varname in capvar[6:8]] 390 | nsolarclasses = [size(solardata[varname], 2) for varname in capvar[1:5]] 391 | nclasses = [nsolarclasses; nwindclasses] 392 | 393 | for t = 1:length(tech) 394 | open(in_datafolder("output", "capacity_$(tech[t]).inc"), "w") do f 395 | for (r,reg) in enumerate(region) 396 | for c = 1:nclasses[t] 397 | val = data[capvar[t]][r,c] 398 | !isnan(val) && val > 0 && @printf(f, "%-3s . %s%-2d %12.6f\n", reg, classname[t], c, val) 399 | end 400 | end 401 | end 402 | open(in_datafolder("output", "cf_$(tech[t]).inc"), "w") do f 403 | for (r,reg) in enumerate(region) 404 | for c = 1:nclasses[t] 405 | for h = 1:8760 406 | val = data[cfvar[t]][h,r,c] 407 | !isnan(val) && val > 0 && @printf(f, "%-3s . %s%-2d . h%04d %10.6f\n", reg, classname[t], c, h, val) 408 | end 409 | end 410 | end 411 | end 412 | end 413 | end 414 | 415 | function dms2deg(dms) 416 | sg = sign(dms) 417 | dms = abs(dms) 418 | d = sg*floor(dms/10000) 419 | dms = dms-10000*d 420 | m = floor(dms/100) 421 | return d + m/60 + (dms-100*m)/3600 422 | end 423 | 424 | # Capital recovery factor for investments 425 | CRF(r, T) = r / (1 - 1/(1+r)^T) 426 | 427 | in_datafolder(names...) = joinpath(getconfig("datafolder"), names...) 428 | -------------------------------------------------------------------------------- /src/make_auxiliary_datasets.jl: -------------------------------------------------------------------------------- 1 | export rasterize_datasets, create_scenario_datasets, cleanup_datasets, makeprotected, savelandcover, 2 | createGDP, creategridaccess, getpopulation, getwindatlas 3 | 4 | # cleanup options: :none, :limited, :all 5 | function rasterize_datasets(; cleanup=:all) 6 | rasterize_GADM() 7 | rasterize_NUTS() 8 | rasterize_protected() 9 | downscale_landcover() 10 | savelandcover() 11 | upscale_topography() 12 | saveregions_global() 13 | rasterize_timezones() 14 | maketimezones() 15 | cleanup_datasets(cleanup=cleanup) 16 | end 17 | 18 | function create_scenario_datasets(scen, year) 19 | if !isfile(in_datafolder("population_$(scen)_$year.jld")) 20 | println("\nCreating population dataset for $scen $year...") 21 | downscale_population(scen, year) 22 | end 23 | if !isfile(in_datafolder("gdp_$(scen)_$year.jld")) 24 | println("\nCreating GDP dataset for $scen $year...") 25 | createGDP(scen, year) 26 | end 27 | if !isfile(in_datafolder("gridaccess_$(scen)_$year.jld")) 28 | println("\nCreating grid access dataset for $scen $year...") 29 | creategridaccess(scen, year) 30 | end 31 | end 32 | 33 | # cleanup options: :none, :limited, :all 34 | function cleanup_datasets(; cleanup=:all) 35 | cleanup == :none && return 36 | for i = 0:2 37 | rm(in_datafolder("WDPA", "protected_raster$i.tif"), force=true) 38 | rm(in_datafolder("WDPA", "protectedfields$i.csv"), force=true) 39 | rm(in_datafolder("WDPA", "protected.jld$i"), force=true) 40 | rm(in_datafolder("WDPA", "WDPA-shapefile$i"), force=true, recursive=true) 41 | end 42 | rm(in_datafolder("landcover.tif"), force=true) 43 | rm(in_datafolder("topography.tif"), force=true) 44 | rm(in_datafolder("timezones.tif"), force=true) 45 | rm(in_datafolder("timezone_names.csv"), force=true) 46 | if cleanup == :all 47 | rm(in_datafolder("Landcover - USGS MODIS.tif"), force=true) 48 | rm(in_datafolder("ETOPO1_Ice_c_geotiff.tif"), force=true) 49 | rm(in_datafolder("gadm36"), force=true, recursive=true) 50 | rm(in_datafolder("nuts2016-level3"), force=true, recursive=true) 51 | rm(in_datafolder("WDPA"), force=true, recursive=true) 52 | rm(in_datafolder("timezones-with-oceans.shapefile"), force=true, recursive=true) 53 | end 54 | end 55 | 56 | function rasterize_GADM() 57 | println("\nRasterizing GADM shapefile for global administrative areas (1-10 minute run time)...") 58 | shapefile = in_datafolder("gadm36", "gadm36.shp") 59 | outfile = in_datafolder("gadm.tif") 60 | options = "-a UID -ot Int32 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW" 61 | # options = "-a UID -ot Int32 -tr 0.02 0.02 -te -180 -90 180 90 -co COMPRESS=LZW" 62 | @time rasterize(shapefile, outfile, split(options, ' ')) 63 | 64 | println("Creating .csv file for regional index and name lookup...") 65 | sql = "select uid,name_0,name_1,name_2 from gadm36" 66 | # sql = "select uid,id_0,name_0,id_1,name_1,id_2,name_2 from gadm36" 67 | outfile = in_datafolder("gadmfields.csv") 68 | ogr2ogr_path() do ogr2ogr 69 | @time run(`$ogr2ogr -f CSV $outfile -sql $sql $shapefile`) 70 | end 71 | nothing 72 | end 73 | 74 | function rasterize_NUTS() 75 | println("\nRasterizing NUTS shapefile for European administrative areas...") 76 | name = "NUTS_RG_01M_2016_4326_LEVL_3" 77 | shapefile = in_datafolder("nuts2016-level3", "$name.shp") 78 | outfile = in_datafolder("nuts.tif") 79 | options = "-a ROWID -ot Int16 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW -dialect SQlite" 80 | sql = "select ROWID+1 AS ROWID,* from $name" 81 | @time rasterize(shapefile, outfile, split(options, ' '), sql=sql) 82 | 83 | println("Creating .csv file for regional index and name lookup...") 84 | outfile = in_datafolder("nutsfields.csv") 85 | sql = "select ROWID+1 AS ROWID,* from $name" 86 | ogr2ogr_path() do ogr2ogr 87 | @time run(`$ogr2ogr -f CSV $outfile -dialect SQlite -sql $sql $shapefile`) 88 | end 89 | nothing 90 | end 91 | 92 | function read_gadm() 93 | println("Reading GADM rasters...") 94 | gadmfields = readdlm(in_datafolder("gadmfields.csv"), ',', header=true)[1] 95 | imax = maximum(gadmfields[:,1]) 96 | subregionnames = fill("", (imax,3)) 97 | subregionnames[gadmfields[:,1],:] = string.(gadmfields[:,2:4]) 98 | gadm = readraster(in_datafolder("gadm.tif")) 99 | return gadm, subregionnames 100 | end 101 | 102 | function read_nuts() 103 | println("Reading NUTS rasters...") 104 | nutsfields = readdlm(in_datafolder("nutsfields.csv"), ',', header=true)[1] 105 | imax = maximum(nutsfields[:,1]) 106 | subregionnames = nutsfields[:,3] # indexes of NUTS regions are in order 1:2016, let's use that 107 | nuts = readraster(in_datafolder("nuts.tif")) 108 | return nuts, subregionnames 109 | end 110 | 111 | function rasterize_protected() 112 | println("\nRasterizing three WDPA shapefiles for protected areas (total run time 6 minutes - 2 hours)...") 113 | 114 | for i = 0:2 115 | println("\nFile $(i+1)/3:") 116 | shapefile = in_datafolder("WDPA", "WDPA-shapefile_$i", "WDPA-shapefile-polygons.shp") 117 | 118 | println("Rasterizing...") 119 | gdal_rasterize_path() do gdal_rasterize 120 | outfile = in_datafolder("WDPA", "protected_raster$i.tif") 121 | sql = "select FID from \"WDPA-shapefile-polygons\"" 122 | options = "-a FID -a_nodata -1 -ot Int32 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW" 123 | @time run(`$gdal_rasterize $(split(options, ' ')) -sql $sql $shapefile $outfile`) 124 | end 125 | 126 | println("Creating .csv file for WDPA index and name lookup...") 127 | ogr2ogr_path() do ogr2ogr 128 | outfile = in_datafolder("WDPA", "protectedfields$i.csv") 129 | sql = "select FID,IUCN_CAT from \"WDPA-shapefile-polygons\"" 130 | run(`$ogr2ogr -f CSV $outfile -sql $sql $shapefile`) 131 | end 132 | 133 | makeprotected(i) 134 | end 135 | 136 | println("\nMerging the three rasters...") 137 | protected = max.( 138 | JLD.load(in_datafolder("WDPA", "protected0.jld"), "protected"), 139 | JLD.load(in_datafolder("WDPA", "protected1.jld"), "protected"), 140 | JLD.load(in_datafolder("WDPA", "protected2.jld"), "protected") 141 | ) 142 | JLD.save(in_datafolder("protected.jld"), "protected", protected, compress=true) 143 | println("Done.") 144 | 145 | nothing 146 | end 147 | 148 | function makeprotected(n) 149 | println("Reading rasters...") 150 | protectedfields = readdlm(in_datafolder("WDPA", "protectedfields$n.csv"), ',', header=true)[1] 151 | IUCNcodes = ["Ia", "Ib", "II", "III", "IV", "V", "VI", "Not Reported", "Not Applicable", "Not Assigned"] 152 | IUCNlookup = Dict(c => i for (i,c) in enumerate(IUCNcodes)) 153 | protected0 = readraster(in_datafolder("WDPA", "protected_raster$n.tif")) 154 | 155 | println("Converting indexes to protected area types...") 156 | protected = similar(protected0, UInt8) 157 | # could replace loop with: map!(p -> p == -1 ? 0 : IUCNlookup[protectedfields[p+1,2], protected, protected0) 158 | # alternatively map!(p -> ifelse(p == -1, 0, IUCNlookup[protectedfields[p+1,2]), protected, protected0) 159 | @time for (i, p) in enumerate(protected0) 160 | protected[i] = (p == -1) ? 0 : IUCNlookup[protectedfields[p+1,2]] 161 | end 162 | println("Saving...") 163 | # JLD.save(in_datafolder("protected$n.jld"), "protected", protected, compress=true) 164 | JLD.save(in_datafolder("WDPA", "protected$n.jld"), "protected", protected) 165 | end 166 | 167 | function rasterize_timezones() 168 | println("Rasterizing shapefile of time zones...") 169 | shapefile = in_datafolder("timezones-with-oceans.shapefile", "dist", "combined-shapefile-with-oceans.shp") 170 | sql = "select FID+1 as FID from \"combined-shapefile-with-oceans\"" 171 | outfile = in_datafolder("timezones.tif") 172 | options = "-a FID -a_nodata 0 -ot Int16 -tr 0.01 0.01 -te -180 -90 180 90 -co COMPRESS=LZW" 173 | gdal_rasterize_path() do gdal_rasterize 174 | @time run(`$gdal_rasterize $(split(options, ' ')) -sql $sql $shapefile $outfile`) 175 | end 176 | 177 | println("Creating .csv file for time zone index and name lookup...") 178 | sql = "select FID+1 as FID,tzid from \"combined-shapefile-with-oceans\"" 179 | outfile = in_datafolder("timezone_names.csv") 180 | ogr2ogr_path() do ogr2ogr 181 | @time run(`$ogr2ogr -f CSV $outfile -sql $sql $shapefile`) 182 | end 183 | nothing 184 | end 185 | 186 | function maketimezones() 187 | println("Reading time zone raster file...") 188 | tznames = string.(readdlm(in_datafolder("timezone_names.csv"), ',', header=true)[1][:,2]) 189 | timezones = readraster(in_datafolder("timezones.tif")) 190 | 191 | f0 = findall(timezones.==0) # find any "no data" pixels (should only be one pixel in Canada near Halifax) 192 | for f in f0 193 | timezones[f] = maximum(timezones[f .+ CartesianIndices((-1:1, -1:1))]) # replace with largest neighbor 194 | end 195 | 196 | println("Saving time zones dataset...") 197 | JLD.save(in_datafolder("timezones.jld"), "timezones", timezones, "tznames", tznames, compress=true) 198 | end 199 | 200 | function loadtimezones(lonrange, latrange) 201 | jldopen(in_datafolder("timezones.jld"), "r") do file 202 | return read(file, "timezones")[lonrange, latrange], read(file, "tznames") 203 | end 204 | end 205 | 206 | function resample(infile::String, outfile::String, options::Vector{<:AbstractString}) 207 | gdal_translate_path() do gdal_translate 208 | @time run(`$gdal_translate $options -co COMPRESS=LZW $infile $outfile`) 209 | end 210 | end 211 | 212 | function downscale_landcover() 213 | println("\nDownscaling landcover dataset (2-10 minutes)...") 214 | infile = in_datafolder("Landcover - USGS MODIS.tif") 215 | options = "-r mode -ot Byte -tr 0.01 0.01" 216 | resample(infile, in_datafolder("landcover.tif"), split(options, ' ')) 217 | nothing 218 | end 219 | 220 | function savelandcover() 221 | println("Converting landcover dataset from TIFF to JLD...") 222 | landcover = readraster(in_datafolder("landcover.tif")) 223 | landtypes = [ 224 | "Water", "Evergreen Needleleaf Forests", "Evergreen Broadleaf Forests", "Deciduous Needleleaf Forests", "Deciduous Broadleaf Forests", 225 | "Mixed Forests", "Closed Shrublands", "Open Shrublands", "Woody Savannas", "Savannas", "Grasslands", "Permanent Wetlands", 226 | "Croplands", "Urban", "Cropland/Natural", "Snow/Ice", "Barren" 227 | ] 228 | landcolors = 1/255 * [ 229 | 190 247 255; 0 100 0; 77 167 86; 123 204 6; 104 229 104; 230 | 55 200 133; 216 118 118; 255 236 163; 182 231 140; 255 228 18; 255 192 107; 40 136 213; 231 | 255 255 0; 255 0 0; 144 144 0; 255 218 209; 190 190 190; 232 | ] 233 | println("Saving landcover dataset...") 234 | JLD.save(in_datafolder("landcover.jld"), "landcover", landcover, "landtypes", landtypes, 235 | "landcolors", landcolors, compress=true) 236 | end 237 | 238 | function upscale_topography() 239 | println("\nUpscaling topography dataset...") 240 | infile = in_datafolder("ETOPO1_Ice_c_geotiff.tif") 241 | options = "-r cubicspline -tr 0.01 0.01" 242 | outfile = in_datafolder("topography.tif") 243 | resample(infile, outfile, split(options, ' ')) 244 | println("Reading new topography raster...") 245 | topography = readraster(outfile) 246 | println("Saving topography dataset...") 247 | JLD.save(in_datafolder("topography.jld"), "topography", topography, compress=true) 248 | end 249 | 250 | # gettopography() = readraster("topography.tif") 251 | 252 | function downscale_population(scen, year) 253 | scen = lowercase(scen) 254 | println("Reading population dataset...") 255 | dataset = Dataset(in_datafolder("SSP_1km", "$(scen)_total_$year.nc4")) 256 | pop = replace(dataset["Band1"][:,:], missing => Float32(0)) 257 | 258 | lat = dataset["lat"][:] 259 | res = 0.5/60 # source resolution 0.5 arcminutes 260 | 261 | println("Padding and saving intermediate dataset...") 262 | skiptop = round(Int, (90-(lat[end]+res/2)) / res) 263 | skipbottom = round(Int, (lat[1]-res/2-(-90)) / res) 264 | nlons = size(pop,1) 265 | # the factor (.01/res)^2 is needed to conserve total population 266 | pop = [zeros(Float32,nlons,skiptop) reverse(pop, dims=2)*Float32((.01/res)^2) zeros(Float32,nlons,skipbottom)] 267 | temptiff = "$(tempname()).tif" 268 | temptiff2 = "$(tempname()).tif" 269 | saveTIFF(pop, temptiff) 270 | 271 | println("Downscaling population dataset...") 272 | options = "-r cubicspline -tr 0.01 0.01" 273 | resample(temptiff, temptiff2, split(options, ' ')) 274 | newpop = readraster(temptiff2) 275 | 276 | println("Saving population dataset...") 277 | JLD.save(in_datafolder("population_$(scen)_$year.jld"), "population", newpop, compress=true) 278 | 279 | rm(temptiff, force=true) 280 | rm(temptiff2, force=true) 281 | end 282 | 283 | getpopulation(scen, year) = JLD.load(in_datafolder("population_$(scen)_$year.jld"), "population") 284 | 285 | function createGDP(scen, year) 286 | scen = lowercase(scen) 287 | scennum = scen[end] 288 | println("Reading low resolution population and GDP datasets...") 289 | pop, extent = readraster(in_datafolder("global_population_and_gdp", "p$(scennum)_$year.tif"), :getextent) # million people 290 | gdp = readraster(in_datafolder("global_population_and_gdp", "g$(scennum)_$year.tif")) # billion USD(2005), PPP 291 | 292 | # Convert to USD 2010 using US consumer price index (CPI-U). CPI-U 2005: 195.3, CPI-U 2010: 218.056 293 | # https://www.usinflationcalculator.com/inflation/consumer-price-index-and-annual-percent-changes-from-1913-to-2008/ 294 | tempfile = tempname() 295 | gdp_per_capita = gdp./pop * 218.056/195.3 * 1000 # new unit: USD(2010)/person, PPP 296 | gdp_per_capita[pop.<=0] .= 0 # non land cells have pop & gdp set to -infinity, set to zero instead 297 | saveTIFF(gdp_per_capita, tempfile, extent) 298 | 299 | # println("Downscaling to high resolution and saving...") 300 | # options = "-r average -tr 0.01 0.01" 301 | # resample(tempfile, "gdp_per_capita_$(scen)_$year.tif", split(options, ' ')) 302 | # rm(tempfile) 303 | 304 | @time gdphigh = downscale_lowres_gdp_per_capita(tempfile, scen, year) # unit: USD(2010)/grid cell, PPP 305 | rm(tempfile, force=true) 306 | println("Saving high resolution GDP...") 307 | JLD.save(in_datafolder("gdp_$(scen)_$year.jld"), "gdp", gdphigh, compress=true) 308 | end 309 | 310 | function downscale_lowres_gdp_per_capita(tempfile, scen, year) 311 | println("Create high resolution GDP set using high resolution population and low resolution GDP per capita...") 312 | gpclow, extent = readraster(tempfile, :extend_to_full_globe) 313 | pop = getpopulation(scen, year) 314 | gdphigh = similar(pop, Float32) 315 | 316 | nrows, ncols = size(gpclow) 317 | sizemult = size(pop,1) ÷ nrows 318 | 319 | for c = 1:ncols 320 | cols = (c-1)*sizemult .+ (1:sizemult) 321 | for r = 1:nrows 322 | gpc = gpclow[r,c] 323 | rows = (r-1)*sizemult .+ (1:sizemult) 324 | gdphigh[rows,cols] = gpc * pop[rows,cols] 325 | end 326 | end 327 | return gdphigh 328 | end 329 | 330 | function creategridaccess(scen, year) 331 | println("Estimate high resolution grid access dataset by filtering gridded GDP...") 332 | gdp = JLD.load(in_datafolder("gdp_$(scen)_$year.jld"), "gdp") 333 | res = 360/size(gdp,1) 334 | 335 | disk = diskfilterkernel(1/6/res) # filter radius = 1/6 degrees 336 | gridaccess = gridsplit(gdp .> 100_000, x -> imfilter(x, disk), Float32) 337 | # gridaccess = Float32.(imfilter(gdp .> 100_000, disk)) # only "high" income cells included (100 kUSD/cell), cell size = 1x1 km 338 | println("\nCompressing...") 339 | selfmap!(x -> ifelse(x<1e-6, 0, x), gridaccess) # force small values to zero to reduce dataset size 340 | println("Saving high resolution grid access...") 341 | JLD.save(in_datafolder("gridaccess_$(scen)_$year.jld"), "gridaccess", gridaccess, compress=true) 342 | 343 | # maybe better: 344 | # loop through countries, index all pixels into vector, sort by GDP, use electrification to assign grid access 345 | end 346 | 347 | function getwindatlas(altitude=100) 348 | # filename = in_datafolder("gwa3_250_wind-speed_100m.tif") # v3.0 (lon extent [-180.3, 180.3], strangely) 349 | # filename = in_datafolder("global_ws.tif") # v2.3 (lon extent [-180.3, 180.3], strangely) 350 | # filename = in_datafolder("Global Wind Atlas v1 - 100m wind speed.tif") # v1.0 351 | # windatlas = readraster(filename, :extend_to_full_globe)[1] 352 | filename = in_datafolder("Global Wind Atlas v3 - $(altitude)m wind speed.tif") # v3.0 353 | windatlas = readraster(filename) 354 | clamp!(windatlas, 0, 25) 355 | end 356 | 357 | # Convert the Global Wind Atlas 3.0 dataset from 250 m to 1 km resolution. This reduces file size 358 | # from 13 GB to 1 GB. Also change its weird lon-lat extents to standard [-180,-90] - [180, 90]. 359 | # Interpolate using bilinear, which is a simple and robust choice for GIS applications that avoids 360 | # creating artifacts. See: 361 | # https://gis.stackexchange.com/questions/10931/what-is-lanczos-resampling-useful-for-in-a-spatial-context 362 | function downsample_windatlas3(altitude=100) 363 | infile = in_datafolder("gwa3_250_wind-speed_$(altitude)m.tif") 364 | gdalinfo_path() do gdalinfo 365 | run(`$gdalinfo $infile`) 366 | end 367 | println("\n") 368 | outfile = in_datafolder("Global Wind Atlas v3 - $(altitude)m wind speed.tif") 369 | options = split("-r bilinear -te -180 -90 180 90 -tr 0.01 0.01", ' ') 370 | gdalwarp_path() do gdalwarp 371 | @time run(`$gdalwarp $options -co COMPRESS=LZW $infile $outfile`) 372 | end 373 | end 374 | 375 | function ogrinfo(file, options=["-al", "-so"]) 376 | ogrinfo_path() do ogrinfo 377 | @time run(`$ogrinfo $options $file`) 378 | end 379 | end 380 | 381 | # GE.ogrinfo("D:/GISdata/Natura2000_end2019_Shapefile/Natura2000_end2019_epsg3035.shp") 382 | # GE.ogrinfo("D:/GISdata/Natura2000_end2019_Shapefile/Natura2000_end2019_epsg3035.shp", ["-dialect", "sqlite", "-sql", "select sitecode, sitename, release_da, ms, sitetype, inspire_id from Natura2000_end2019_epsg3035 limit 10"]) 383 | 384 | function rasterize_Natura2000() 385 | shapefile = in_datafolder("Natura2000_end2019_Shapefile", "Natura2000_end2019_epsg3035.shp") 386 | shapefile_proj = in_datafolder("Natura2000_end2019_Shapefile", "Natura2000_reprojected.shp") 387 | 388 | # Couldn't figure out how to make gdal_rasterize reproject on the fly, so... 389 | if !isfile(shapefile_proj) 390 | println("Reprojecting shapefile to EPSG:4326...") 391 | ogr2ogr_path() do ogr2ogr 392 | @time run(`$ogr2ogr -t_srs epsg:4326 -lco ENCODING=UTF-8 $shapefile_proj $shapefile`) 393 | end 394 | end 395 | 396 | println("Rasterizing...") 397 | gdal_rasterize_path() do gdal_rasterize 398 | outfile = in_datafolder("natura2000.tif") 399 | isfile(outfile) && rm(outfile) 400 | # https://sdi.eea.europa.eu/catalogue/copernicus9129929/api/records/e40ca403-b81a-4ecb-b484-cade980e9a2f 401 | # SITETYPE contains "A", "B" or "C": 402 | # A: SPAs (Special Protection Areas - sites designated under the Birds Directive); 403 | # B: SCIs and SACs (Sites of Community Importance and Special Areas of Conservation - sites designated under the Habitats Directive); 404 | # C: where SPAs and SCIs/SACs boundaries are identical (sites designated under both directives). 405 | sql = "select unicode(SITETYPE)-64 as CODE,* from Natura2000_reprojected" 406 | @time run(`$gdal_rasterize -a CODE -ot Byte -tr 0.01 0.01 -te -32 28 34 70 407 | -co COMPRESS=LZW -dialect sqlite -sql $sql $shapefile_proj $outfile`) 408 | end 409 | readraster(in_datafolder("natura2000.tif")) 410 | end 411 | 412 | function rasterize_MIUU() 413 | shapefile = in_datafolder("MIUU vindkartering-100m-sweref99", "vindkartering 2011_100m.shp") 414 | shapefile_proj = in_datafolder("MIUU vindkartering-100m-sweref99", "MIUU_reprojected.shp") 415 | 416 | # Couldn't figure out how to make gdal_rasterize reproject on the fly, so... 417 | if !isfile(shapefile_proj) 418 | println("Reprojecting shapefile to EPSG:4326...") 419 | ogr2ogr_path() do ogr2ogr 420 | @time run(`$ogr2ogr -t_srs epsg:4326 -lco ENCODING=UTF-8 $shapefile_proj $shapefile`) 421 | end 422 | end 423 | 424 | println("Rasterizing...") 425 | gdal_rasterize_path() do gdal_rasterize 426 | outfile = in_datafolder("miuu_windatlas.tif") 427 | isfile(outfile) && rm(outfile) 428 | @time run(`$gdal_rasterize -a Z -ot Float32 -tr 0.01 0.01 -te 10.65 55.07 24.17 69.07 429 | -co COMPRESS=LZW $shapefile_proj $outfile`) 430 | end 431 | readraster(in_datafolder("miuu_windatlas.tif")) 432 | end 433 | -------------------------------------------------------------------------------- /src/GISsolar.jl: -------------------------------------------------------------------------------- 1 | solaroptions() = Dict( 2 | :gisregion => "Europe8", # "Europe8", "Eurasia38", "Scand3" 3 | :filenamesuffix => "", # e.g. "_landx2" to save high land availability data as "GISdata_solar2018_Europe8_landx2.mat" 4 | 5 | :pv_density => 45, # Solar PV land use 45 Wp/m2 = 45 MWp/km2 (includes PV efficiency & module spacing, add latitude dependency later) 6 | :csp_density => 35, # CSP land use 35 W/m2 7 | 8 | :pvroof_area => .05, # area available for rooftop PV after the masks have been applied 9 | :plant_area => .05, # area available for PV or CSP plants after the masks have been applied 10 | 11 | :distance_elec_access => 150, # max distance to grid [km] (for solar classes of category B) 12 | :plant_persons_per_km2 => 150, # not too crowded, max X persons/km2 (both PV and CSP plants) 13 | :pvroof_persons_per_km2 => 200, # only in populated areas, so AT LEAST x persons/km2 14 | # US census bureau requires 1000 ppl/mile^2 = 386 ppl/km2 for "urban" (half in Australia) 15 | # roughly half the people of the world live at density > 300 ppl/km2 16 | :exclude_landtypes => [0,1,2,3,4,5,8], # exclude water and forests. See codes in table below. 17 | :protected_codes => [1,2,3,4,5,8], # IUCN codes to be excluded as protected areas. See codes in table below. 18 | 19 | :scenarioyear => "ssp2_2050", # default scenario and year for population and grid access datasets 20 | :era_year => 2018, # which year of the ERA5 time series to use 21 | 22 | :res => 0.01, # resolution of auxiliary datasets [degrees per pixel] 23 | :erares => 0.28125, # resolution of ERA5 datasets [degrees per pixel] 24 | 25 | :pvclasses_min => [0.08,0.14,0.18,0.22,0.26], # lower bound on annual PV capacity factor for class X [0:0.01:0.49;] 26 | :pvclasses_max => [0.14,0.18,0.22,0.26,1.00], # upper bound on annual PV capacity factor for class X [0.01:0.01:0.50;] 27 | :cspclasses_min => [0.10,0.18,0.24,0.28,0.32], # lower bound on annual CSP capacity factor for class X 28 | :cspclasses_max => [0.18,0.24,0.28,0.32,1.00], # upper bound on annual CSP capacity factor for class X 29 | 30 | :grid_everywhere => false, # set to true to assume all pixels have grid access 31 | :downsample_masks => 1, # set to 2 or higher to scale down mask sizes to avoid GPU errors in Makie plots for large regions 32 | :classB_threshold => 0.001 # minimum share of pixels within distance_elec_access km that must have grid access 33 | # for a pixel to be considered for solar class B. 34 | ) 35 | # Land types 36 | # 0 'Water' 37 | # 1 'Evergreen Needleleaf Forests' 38 | # 2 'Evergreen Broadleaf Forests' 39 | # 3 'Deciduous Needleleaf Forests' 40 | # 4 'Deciduous Broadleaf Forests' 41 | # 5 'Mixed Forests' 42 | # 6 'Closed Shrublands' 43 | # 7 'Open Shrublands' 44 | # 8 'Woody Savannas' 45 | # 9 'Savannas' 46 | # 10 'Grasslands' 47 | # 11 'Permanent Wetlands' 48 | # 12 'Croplands' 49 | # 13 'Urban' 50 | # 14 'Cropland/Natural' 51 | # 15 'Snow/Ice' 52 | # 16 'Barren' 53 | 54 | # Protected areas (IUCN codes from the WDPA) 55 | # 1 'Ia' 'Strict Nature Reserve' 56 | # 2 'Ib' 'Wilderness Area' 57 | # 3 'II' 'National Park' 58 | # 4 'III' 'Natural Monument' 59 | # 5 'IV' 'Habitat/Species Management' 60 | # 6 'V' 'Protected Landscape/Seascape' 61 | # 7 'VI' 'Managed Resource Protected Area' 62 | # 8 'Not Reported' 'Not Reported' 63 | # 9 'Not Applicable' 'Not Applicable' 64 | # 10 'Not Assigned' 'Not Assigned' 65 | 66 | 67 | mutable struct SolarOptions 68 | gisregion ::String 69 | filenamesuffix ::String 70 | pv_density ::Float64 # W/m2 71 | csp_density ::Float64 # W/m2 72 | pvroof_area ::Float64 # share [0-1] 73 | plant_area ::Float64 # share [0-1] 74 | distance_elec_access ::Float64 # km 75 | plant_persons_per_km2 ::Float64 # persons/km2 76 | pvroof_persons_per_km2 ::Float64 # persons/km2 77 | exclude_landtypes ::Vector{Int} 78 | protected_codes ::Vector{Int} 79 | scenarioyear ::String 80 | era_year ::Int 81 | res ::Float64 # degrees/pixel 82 | erares ::Float64 # degrees/pixel 83 | pvclasses_min ::Vector{Float64} 84 | pvclasses_max ::Vector{Float64} 85 | cspclasses_min ::Vector{Float64} 86 | cspclasses_max ::Vector{Float64} 87 | grid_everywhere ::Bool 88 | downsample_masks ::Int 89 | classB_threshold ::Float64 90 | end 91 | 92 | SolarOptions() = SolarOptions("","",0,0,0,0,0,0,0,[],[],"",0,0,0,[],[],[],[],false,0,0.0) 93 | 94 | function SolarOptions(d::Dict{Symbol,Any}) 95 | options = SolarOptions() 96 | for (key,val) in d 97 | setproperty!(options, key, val) 98 | end 99 | return options 100 | end 101 | 102 | function GISsolar(; savetodisk=true, plotmasks=false, optionlist...) 103 | 104 | # IMPORTANT!! The function makesolarera5() uses ERA5 solar datasets to 105 | # calculate Global Tilted Irradiance (GTI) for solar PV and Direct Normal 106 | # Irradiance (DNI) for CSP solar towers. If we want CSP parabolic troughs 107 | # then we need to add that dataset in makesolarera5() (and capacity factors 108 | # will become somewhat lower). 109 | 110 | # NOTE ON SOLAR UNIT: the solar irradiance sets are in kW/m2. Since the 111 | # irradiance value used to represent "standard testing conditions" for PV 112 | # is 1000 W/m2, the solar datasets also directly give the capacity factor. 113 | # Actual insolation can occasionally go above 1000 W/m2. 114 | 115 | # Ideally, we should make direct assumptions of PV module efficiency as a 116 | # function of air temperature (see e.g. Bett & Thornton appendix A2), but 117 | # for now efficiency is included in our assumption of :pv_density. Wind 118 | # speed also affects PV module temperature and efficiency. However, the 119 | # uncertainties in :pv_density and :plant_area are so large that efficiency 120 | # variations as a function of temperature don't matter. 121 | 122 | options = SolarOptions(merge(solaroptions(), optionlist)) 123 | @unpack gisregion, era_year, filenamesuffix, pv_density, csp_density, downsample_masks = options 124 | 125 | regions, offshoreregions, regionlist, gridaccess, popdens, topo, land, protected, lonrange, latrange = 126 | read_datasets(options) 127 | 128 | mask_rooftop, mask_plantA, mask_plantB = 129 | create_solar_masks(options, regions, gridaccess, popdens, land, protected, lonrange, latrange, 130 | plotmasks=plotmasks, downsample=downsample_masks) 131 | 132 | plotmasks == :onlymasks && return nothing 133 | 134 | meanGTI, solarGTI, meanDNI, solarDNI = read_solar_datasets(options, lonrange, latrange) 135 | 136 | CF_pvrooftop, CF_pvplantA, CF_pvplantB, CF_cspplantA, CF_cspplantB, solar_overlap_areaA, solar_overlap_areaB, 137 | capacity_pvrooftop, capacity_pvplantA, capacity_pvplantB, capacity_cspplantA, capacity_cspplantB = 138 | calc_solar_vars(options, meanGTI, solarGTI, meanDNI, solarDNI, regions, offshoreregions, regionlist, 139 | mask_rooftop, mask_plantA, mask_plantB, lonrange, latrange) 140 | 141 | if savetodisk 142 | mkpath(in_datafolder("output")) 143 | matopen(in_datafolder("output", "GISdata_solar$(era_year)_$gisregion$filenamesuffix.mat"), "w", compress=true) do file 144 | write(file, "CFtime_pvrooftop", CF_pvrooftop) 145 | write(file, "CFtime_pvplantA", CF_pvplantA) 146 | write(file, "CFtime_pvplantB", CF_pvplantB) 147 | write(file, "CFtime_cspplantA", CF_cspplantA) 148 | write(file, "CFtime_cspplantB", CF_cspplantB) 149 | write(file, "capacity_pvrooftop", capacity_pvrooftop) 150 | write(file, "capacity_pvplantA", capacity_pvplantA) 151 | write(file, "capacity_pvplantB", capacity_pvplantB) 152 | write(file, "capacity_cspplantA", capacity_cspplantA) 153 | write(file, "capacity_cspplantB", capacity_cspplantB) 154 | write(file, "solar_overlap_areaA", solar_overlap_areaA) 155 | write(file, "solar_overlap_areaB", solar_overlap_areaB) 156 | write(file, "pv_density", pv_density) 157 | write(file, "csp_density", csp_density) 158 | end 159 | end 160 | 161 | nothing 162 | # return CF_pvrooftop, CF_pvplantA, CF_pvplantB, CF_cspplantA, CF_cspplantB, 163 | # capacity_pvrooftop, capacity_pvplantA, capacity_pvplantB, capacity_cspplantA, capacity_cspplantB 164 | end 165 | 166 | function read_solar_datasets(options, lonrange, latrange) 167 | @unpack res, erares, era_year = options 168 | 169 | println("Reading ERA5 solar datasets...") 170 | eralonranges, eralatrange = eraranges(lonrange, latrange, res, erares) 171 | 172 | @time meanGTI, solarGTI, meanDNI, solarDNI = h5open(in_datafolder("era5solar$era_year.h5"), "r") do file 173 | if length(eralonranges) == 1 174 | file["meanGTI"][eralonranges[1], eralatrange], 175 | file["GTI"][:,eralonranges[1], eralatrange], 176 | file["meanDNI"][eralonranges[1], eralatrange], 177 | file["DNI"][:,eralonranges[1], eralatrange] 178 | else 179 | [file["meanGTI"][eralonranges[1], eralatrange]; file["meanGTI"][eralonranges[2], eralatrange]], 180 | [file["GTI"][:, eralonranges[1], eralatrange] file["GTI"][:, eralonranges[2], eralatrange]], 181 | [file["meanDNI"][eralonranges[1], eralatrange]; file["meanDNI"][eralonranges[2], eralatrange]], 182 | [file["DNI"][:, eralonranges[1], eralatrange] file["DNI"][:, eralonranges[2], eralatrange]] 183 | end 184 | end 185 | return meanGTI, solarGTI, meanDNI, solarDNI 186 | end 187 | 188 | function create_solar_masks(options, regions, gridaccess, popdens, land, protected, lonrange, latrange; plotmasks=false, downsample=1) 189 | @unpack res, gisregion, exclude_landtypes, protected_codes, distance_elec_access, plant_persons_per_km2, 190 | pvroof_persons_per_km2, classB_threshold, filenamesuffix, grid_everywhere = options 191 | 192 | println("Creating masks...") 193 | 194 | goodland = (regions .> 0) 195 | for i in exclude_landtypes 196 | goodland[land .== i] .= false 197 | end 198 | protected_area = zeros(Bool, size(protected)) 199 | for i in protected_codes 200 | protected_area[protected .== i] .= true 201 | end 202 | 203 | # Pixels with electricity access for onshore wind A 204 | gridA = grid_everywhere ? fill(true, size(gridaccess)) : (gridaccess .> 0) 205 | 206 | # Pixels with electricity access for onshore wind B and offshore wind 207 | km_per_degree = π*2*6371/360 208 | disk = diskfilterkernel(distance_elec_access/km_per_degree/res) 209 | gridB = (imfilter(gridaccess, disk) .> max(1e-9, classB_threshold)) # avoid artifacts if classB_threshold == 0 210 | 211 | # println("MAKE SURE MASKS DON'T OVERLAP! (regions & offshoreregions, mask_*)") 212 | 213 | # all mask conditions 214 | mask_rooftop = gridA .& (popdens .> pvroof_persons_per_km2) .& .!protected_area 215 | mask_plantA = gridA .& (popdens .< plant_persons_per_km2) .& goodland .& .!protected_area 216 | mask_plantB = (gridB .& .!gridA) .& (popdens .< plant_persons_per_km2) .& goodland .& .!protected_area 217 | 218 | if plotmasks != false # can == :onlymasks as well 219 | # drawmap(land) 220 | isregion = (regions .> 0) .& (regions .!= NOREGION) 221 | 222 | # mask values refer to colors in ColorBrewer Set2_7: 223 | # https://juliagraphics.github.io/ColorSchemes.jl/stable/basics/#colorbrewer-1 224 | masks = zeros(Int16, size(regions)) 225 | masks[(masks .== 0) .& (popdens .> plant_persons_per_km2)] .= 2 226 | masks[(masks .== 0) .& protected_area] .= 3 227 | masks[(masks .== 0) .& .!gridA .& .!gridB] .= 4 228 | masks[(masks .== 0) .& .!goodland] .= 1 229 | masks[(masks .== 0) .& .!gridA .& gridB] .= 6 230 | masks[(masks .== 0) .& isregion] .= 5 231 | masks[regions .== 0] .= 0 232 | masks[regions .== NOREGION] .= NOREGION 233 | legendtext = ["bad land type", "high population", "protected area", "no grid", "solar plant A", "solar plant B", "", ""] 234 | maskmap("$(gisregion)_masks_solar$filenamesuffix", masks, legendtext, lonrange, latrange; legend=true, downsample=downsample) 235 | end 236 | 237 | return mask_rooftop, mask_plantA, mask_plantB 238 | end 239 | 240 | function increment_solarCF!(cf::AbstractVector{<:AbstractFloat}, solardata::AbstractVector{<:AbstractFloat}) 241 | @inbounds for i = 1:length(cf) 242 | cf[i] += solardata[i] 243 | end 244 | end 245 | 246 | function makesolarclasses(options, meanGTI, meanDNI) 247 | println("Allocating pixels to classes using ERA5 annual means...") 248 | 249 | @unpack pvclasses_min, pvclasses_max, cspclasses_min, cspclasses_max = options 250 | 251 | pvclass = getclasses(meanGTI, pvclasses_min, pvclasses_max) 252 | cspclass = getclasses(meanDNI, cspclasses_min, cspclasses_max) 253 | 254 | return pvclass, cspclass 255 | end 256 | 257 | function calc_solar_vars(options, meanGTI, solarGTI, meanDNI, solarDNI, regions, offshoreregions, regionlist, 258 | mask_rooftop, mask_plantA, mask_plantB, lonrange, latrange) 259 | 260 | pvclass, cspclass = makesolarclasses(options, meanGTI, meanDNI) 261 | eralons, eralats, lonmap, latmap, cellarea = eralonlat(options, lonrange, latrange) 262 | 263 | println("Calculating GW potential and hourly capacity factors for each region and class...") 264 | # println("Interpolate ERA5 insolation later (maybe 4x runtime).") 265 | 266 | @unpack era_year, pvclasses_min, cspclasses_min, res, erares, pv_density, csp_density, pvroof_area, plant_area = options 267 | 268 | numreg = length(regionlist) 269 | npvclasses, ncspclasses = length(pvclasses_min), length(cspclasses_min) 270 | yearlength, nlons, nlats = size(solarGTI) 271 | firsttime = DateTime(era_year, 1, 1) 272 | 273 | capacity_pvrooftop = zeros(numreg,npvclasses) 274 | capacity_pvplantA = zeros(numreg,npvclasses) 275 | capacity_pvplantB = zeros(numreg,npvclasses) 276 | capacity_cspplantA = zeros(numreg,ncspclasses) 277 | capacity_cspplantB = zeros(numreg,ncspclasses) 278 | CF_pvrooftop = zeros(yearlength,numreg,npvclasses) 279 | CF_pvplantA = zeros(yearlength,numreg,npvclasses) 280 | CF_pvplantB = zeros(yearlength,numreg,npvclasses) 281 | CF_cspplantA = zeros(yearlength,numreg,ncspclasses) 282 | CF_cspplantB = zeros(yearlength,numreg,ncspclasses) 283 | count_pvrooftop = zeros(Int,numreg,npvclasses) 284 | count_pvplantA = zeros(Int,numreg,npvclasses) 285 | count_pvplantB = zeros(Int,numreg,npvclasses) 286 | count_cspplantA = zeros(Int,numreg,ncspclasses) 287 | count_cspplantB = zeros(Int,numreg,ncspclasses) 288 | solar_overlap_areaA = zeros(numreg,npvclasses,ncspclasses) 289 | solar_overlap_areaB = zeros(numreg,npvclasses,ncspclasses) 290 | 291 | # Run times vary wildly depending on geographical area (because of far offshore regions with mostly zero wind speeds). 292 | # To improve the estimated time of completing the progress bar, iterate over latitudes in random order. 293 | Random.seed!(1) 294 | updateprogress = Progress(nlats, 1) 295 | for j in randperm(nlats) 296 | eralat = eralats[j] 297 | colrange = latmap[lat2col(eralat+erares/2, res):lat2col(eralat-erares/2, res)-1] 298 | for i = 1:nlons 299 | meanGTI[i,j] == 0 && meanDNI[i,j] == 0 && continue 300 | GTI = solarGTI[:, i, j] 301 | DNI = solarDNI[:, i, j] 302 | eralon = eralons[i] 303 | # get all high resolution row and column indexes within this ERA5 cell 304 | rowrange = lonmap[lon2row(eralon-erares/2, res):lon2row(eralon+erares/2, res)-1] 305 | 306 | for c in colrange, r in rowrange 307 | (c == 0 || r == 0) && continue 308 | reg = regions[r,c] 309 | (reg == 0 || reg == NOREGION) && continue 310 | 311 | area = cellarea[c] 312 | class = pvclass[i,j] 313 | # can't use elseif here, probably some overlap in the masks 314 | # @views is needed to make sure increment_windCF!() works with matrix slices 315 | # also faster since it avoids making copies 316 | @views if class > 0 317 | if mask_rooftop[r,c] > 0 318 | capacity_pvrooftop[reg,class] += 1/1000 * pv_density * pvroof_area * area 319 | increment_solarCF!(CF_pvrooftop[:,reg,class], GTI) 320 | count_pvrooftop[reg,class] += 1 321 | elseif mask_plantA[r,c] > 0 322 | capacity_pvplantA[reg,class] += 1/1000 * pv_density * plant_area * area 323 | increment_solarCF!(CF_pvplantA[:,reg,class], GTI) 324 | count_pvplantA[reg,class] += 1 325 | elseif mask_plantB[r,c] > 0 326 | capacity_pvplantB[reg,class] += 1/1000 * pv_density * plant_area * area 327 | increment_solarCF!(CF_pvplantB[:,reg,class], GTI) 328 | count_pvplantB[reg,class] += 1 329 | end 330 | end 331 | 332 | class_pv = class 333 | class = cspclass[i,j] 334 | # @views is needed to make sure increment_windCF!() works with matrix slices 335 | # also faster since it avoids making copies 336 | @views if class > 0 337 | if mask_plantA[r,c] > 0 338 | capacity_cspplantA[reg,class] += 1/1000 * csp_density * plant_area * area 339 | increment_solarCF!(CF_cspplantA[:,reg,class], DNI) 340 | count_cspplantA[reg,class] += 1 341 | elseif mask_plantB[r,c] > 0 342 | capacity_cspplantB[reg,class] += 1/1000 * csp_density * 2 * plant_area * area 343 | increment_solarCF!(CF_cspplantB[:,reg,class], DNI) 344 | count_cspplantB[reg,class] += 1 345 | end 346 | end 347 | 348 | if class_pv > 0 && class > 0 349 | if mask_plantA[r,c] > 0 350 | solar_overlap_areaA[reg,class_pv,class] += 1/1000 * plant_area * area 351 | elseif mask_plantB[r,c] > 0 352 | solar_overlap_areaB[reg,class_pv,class] += 1/1000 * 2 * plant_area * area 353 | end 354 | end 355 | end 356 | end 357 | next!(updateprogress) 358 | end 359 | 360 | for y = 1:yearlength 361 | CF_pvrooftop[y,:,:] ./= count_pvrooftop 362 | CF_pvplantA[y,:,:] ./= count_pvplantA 363 | CF_pvplantB[y,:,:] ./= count_pvplantB 364 | CF_cspplantA[y,:,:] ./= count_cspplantA 365 | CF_cspplantB[y,:,:] ./= count_cspplantB 366 | end 367 | 368 | return CF_pvrooftop, CF_pvplantA, CF_pvplantB, CF_cspplantA, CF_cspplantB, solar_overlap_areaA, solar_overlap_areaB, 369 | capacity_pvrooftop, capacity_pvplantA, capacity_pvplantB, capacity_cspplantA, capacity_cspplantB 370 | end 371 | 372 | 373 | 374 | 375 | 376 | # Quick and ugly copy/paste hack to create resource maps for solar classes combined with masks. 377 | function GISsolarmap(; optionlist...) 378 | options = SolarOptions(merge(solaroptions(), optionlist)) 379 | @unpack gisregion, era_year, filenamesuffix = options 380 | 381 | regions, offshoreregions, regionlist, gridaccess, popdens, topo, land, protected, lonrange, latrange = 382 | read_datasets(options) 383 | meanGTI, solarGTI, meanDNI, solarDNI = read_solar_datasets(options, lonrange, latrange) 384 | 385 | mask_rooftop, mask_plantA, mask_plantB = 386 | create_solar_masks(options, regions, gridaccess, popdens, land, protected, lonrange, latrange, plotmasks=true) 387 | 388 | pvmap, pvrooftopmap, cspmap = 389 | calc_solar_map(options, meanGTI, solarGTI, meanDNI, solarDNI, regions, offshoreregions, regionlist, 390 | mask_rooftop, mask_plantA, mask_plantB, lonrange, latrange) 391 | 392 | return pvmap, pvrooftopmap, cspmap 393 | end 394 | 395 | function calc_solar_map(options, meanGTI, solarGTI, meanDNI, solarDNI, regions, offshoreregions, regionlist, 396 | mask_rooftop, mask_plantA, mask_plantB, lonrange, latrange) 397 | 398 | pvclass, cspclass = makesolarclasses(options, meanGTI, meanDNI) 399 | eralons, eralats, lonmap, latmap, cellarea = eralonlat(options, lonrange, latrange) 400 | 401 | println("Calculating GW potential and hourly capacity factors for each region and class...") 402 | println("Interpolate ERA5 insolation later (maybe 4x runtime).") 403 | 404 | @unpack era_year, pvclasses_min, cspclasses_min, res, erares, pv_density, csp_density, pvroof_area, plant_area = options 405 | 406 | numreg = length(regionlist) 407 | npvclasses, ncspclasses = length(pvclasses_min), length(cspclasses_min) 408 | yearlength, nlons, nlats = size(solarGTI) 409 | firsttime = DateTime(era_year, 1, 1) 410 | 411 | # pvmap = zeros(size(regions)) 412 | # pvrooftopmap = zeros(size(regions)) 413 | # cspmap = zeros(size(regions)) 414 | pvmap = zeros(Int16, size(regions)) 415 | pvrooftopmap = zeros(Int16, size(regions)) 416 | cspmap = zeros(Int16, size(regions)) 417 | 418 | # Run times vary wildly depending on geographical area (because of far offshore regions with mostly zero wind speeds). 419 | # To improve the estimated time of completing the progress bar, iterate over latitudes in random order. 420 | Random.seed!(1) 421 | updateprogress = Progress(nlats, 1) 422 | @inbounds for j in randperm(nlats) 423 | eralat = eralats[j] 424 | colrange = latmap[lat2col(eralat+erares/2, res):lat2col(eralat-erares/2, res)-1] 425 | for i = 1:nlons 426 | meanGTI[i,j] == 0 && meanDNI[i,j] == 0 && continue 427 | GTI = solarGTI[:, i, j] 428 | DNI = solarDNI[:, i, j] 429 | eralon = eralons[i] 430 | # get all high resolution row and column indexes within this ERA5 cell 431 | rowrange = lonmap[lon2row(eralon-erares/2, res):lon2row(eralon+erares/2, res)-1] 432 | 433 | for c in colrange, r in rowrange 434 | (c == 0 || r == 0) && continue 435 | reg = regions[r,c] 436 | area = cellarea[c] 437 | 438 | class = pvclass[i,j] 439 | # can't use elseif here, probably some overlap in the masks 440 | # @views is needed to make sure increment_windCF!() works with matrix slices 441 | # also faster since it avoids making copies 442 | @views if reg > 0 && class > 0 443 | if mask_rooftop[r,c] > 0 444 | # pvrooftopmap[r,c] = meanGTI[i,j] 445 | pvrooftopmap[r,c] = class 446 | elseif mask_plantA[r,c] > 0 447 | # pvmap[r,c] = meanGTI[i,j] 448 | pvmap[r,c] = class 449 | elseif mask_plantB[r,c] > 0 450 | # pvmap[r,c] = class 451 | end 452 | end 453 | 454 | class = cspclass[i,j] 455 | # @views is needed to make sure increment_windCF!() works with matrix slices 456 | # also faster since it avoids making copies 457 | @views if reg > 0 && class > 0 458 | if mask_plantA[r,c] > 0 459 | # cspmap[r,c] = meanGTI[i,j] 460 | cspmap[r,c] = class 461 | elseif mask_plantB[r,c] > 0 462 | # cspmap[r,c] = class 463 | end 464 | end 465 | end 466 | end 467 | next!(updateprogress) 468 | end 469 | 470 | return pvmap, pvrooftopmap, cspmap 471 | end --------------------------------------------------------------------------------