File:Estimated limit of human habitat europe lgm 1.svg

From Wikimedia Commons, the free media repository

Original file(SVG file, nominally 1,200 × 795 pixels, file size: 1.74 MB)

Captions

Estimated northern limit of human habitat in Europe during Last Glacial Maximum

Summary

Description
English: Estimated northern limit of human habitat in Europe during Last Glacial Maximum
Date
Source Own work
Author Merikanto

Source data for this image is the CHELSA LGM CCSM4 dataset.

The ptopet (ratio of precipitation to potential evapotranspiration) data is generated with ENVIREM from the CHELSA rasters.

Sites data is from

"Demographic estimates of hunter–gatherers during the Last Glacial Maximum in Europe against the background of palaeoenvironmental data"

plus additional intuitive points:

https://www.sciencedirect.com/science/article/abs/pii/S1040618216300490
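
For orientation, here is a minimal NumPy sketch of the moisture index the map relies on (annual precipitation divided by annual potential evapotranspiration); the arrays are made-up stand-ins for the actual CHELSA/ENVIREM rasters:

import numpy as np

precip = np.array([[400.0, 800.0], [150.0, 600.0]])  # annual precipitation, mm (made up)
pet = np.array([[900.0, 700.0], [1000.0, 500.0]])    # annual PET, mm (made up)

ptopet = np.where(pet > 0, precip / pet, np.nan)     # P/PET moisture index
print(ptopet)                                        # values > 1 indicate a moisture surplus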

Python script to produce the output

# netcdf downscaler
# with regression test
# also habitat test
# python 3, GDAL
# v 00019
# 20.10.2020

import numpy as np
import scipy
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import netCDF4 as nc
import os
import array as arr
import scipy.stats
from scipy.interpolate import Rbf

from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn import svm, metrics
from sklearn.neighbors import KernelDensity
from sklearn.tree import DecisionTreeClassifier
from skmultilearn.problem_transform import BinaryRelevance
from pygam import LogisticGAM
from pygam import LinearGAM

# if basemap is available, we'll use it.
# otherwise, we'll improvise later...
try:
    from mpl_toolkits.basemap import Basemap
    basemap = True
except ImportError:
    basemap = False

cache_lons1 = []
cache_lats1 = []
cache_x = []
cache_y = []
cache_z = []
cache_x2 = []
cache_y2 = []
cache_z2 = []

def save_points_toxyz(filename, x, y, z):
    print("Dummy function only")
    return 0

def funk_test_00(x1, y1, x2):
    print("MLR")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = sha1[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    model = LinearRegression().fit(x, y)
    # degree = 3
    # polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    # model = polyreg.fit(x, y)
    y2 = model.predict(xb)
    # rescale the predictions with a normal survival function,
    # then normalise the result to 0..1
    k_std = 0.00001
    y2_mean = np.mean(y2)
    y2_std = np.std(y2) * k_std
    y2_cover_std = (y2 - y2_mean) / y2_std
    y3 = scipy.stats.norm.sf(y2_cover_std, y2_mean, y2_std)
    ymin = np.min(y3)
    deltamax = np.max(y3) - np.min(y3)
    y4 = (y3 - ymin) / deltamax
    return y4
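
# Editor's illustration (safe to run): because k_std above is tiny, the
# survival-function step acts as a soft threshold at the mean of the
# predictions before the final 0..1 normalisation.
demo_y2 = np.array([0.1, 0.4, 0.5, 0.6, 0.9])   # made-up regression outputs
demo_m = np.mean(demo_y2)
demo_s = np.std(demo_y2) * 0.00001
demo_y3 = scipy.stats.norm.sf((demo_y2 - demo_m) / demo_s, demo_m, demo_s)
print(demo_y3)   # values up to the mean map to ~1, values above it to ~0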

def get_z_points_from_memory(imga, lonsa, latsa, pointsx1, pointsy1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    cache_lons1 = []
    cache_lats1 = []
    cache_x = []
    cache_y = []
    cache_z = []
    cache_x2 = []
    cache_y2 = []
    cache_z2 = []

    print(len(lonsa))
    print(np.shape(imga))

    dim1 = imga.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    pitu = len(pointsx1)

    lonaa = np.array(lonsa).ravel()
    lataa = np.array(latsa).ravel()

    # for each sample point, pick the grid value whose coordinates are
    # closest (within 0.05 degrees) to the point
    for n in range(0, pitu):
        px1 = pointsx1[n]
        py1 = pointsy1[n]
        idx, = np.where(np.isclose(lonaa, px1, atol=0.05))
        idy, = np.where(np.isclose(lataa, py1, atol=0.05))
        zv1 = imga[idx, idy]
        # zv1 = imga[idy, idx]
        cache_z.append(zv1[0])

    cache_lons1 = lonsa
    cache_lats1 = latsa
    return cache_z

def kde_raster(x, y, bandwith, lon1, lat1, lon2, lat2, xbins, ybins, **kwargs):
    ## note: xbins, ybins must be complex numbers, e.g. "10j" or complex(10)
    xx, yy = np.mgrid[lon1:lon2:xbins, lat1:lat2:ybins]
    xy_sample = np.vstack([yy.ravel(), xx.ravel()]).T
    xy_train = np.vstack([y, x]).T
    kde_skl = KernelDensity(bandwidth=bandwith, **kwargs)
    kde_skl.fit(xy_train)
    z = np.exp(kde_skl.score_samples(xy_sample))
    return xx, yy, np.reshape(z, xx.shape)
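
# Editor's illustration: kde_raster() needs complex bin counts because np.mgrid
# interprets a complex step as "number of points". With made-up site coordinates:
demo_rng = np.random.default_rng(0)
demo_lons = demo_rng.uniform(-20, 80, 200)
demo_lats = demo_rng.uniform(30, 70, 200)
demo_xx, demo_yy, demo_dens = kde_raster(demo_lons, demo_lats, 1.0,
                                         -20, 30, 80, 70,
                                         complex(100), complex(50))
print(demo_dens.shape)   # (100, 50): KDE density evaluated on the grid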

def load_xy(infname1, lonname1, latname1):
    global cache_lons1
    global cache_lats1
    indf1 = pd.read_csv(infname1, sep=';')
    templons1 = indf1[lonname1]
    templats1 = indf1[latname1]
    cache_lons1 = np.array(templons1)
    cache_lats1 = np.array(templats1)
    return 0


def preprocess_big_raster(infilename1, invarname1, sabluname1, outfilename1,
                          soutfilename1, outvarname1, lon1, lon2, lat1, lat2,
                          width1, height1, roto):
    gdal_cut_resize_fromnc_tonc(infilename1, sabluname1, outfilename1,
                                invarname1, lon1, lon2, lat1, lat2)
    gdal_cut_resize_tonc(infilename1, soutfilename1, lon1, lon2, lat1, lat2,
                         width1, height1)
    return 0


def preprocess_small_raster(infilename1, invarname1, intime1, outfilename1,
                            outvarname1, lon1, lon2, lat1, lat2, roto):
    tempfilename1 = "./temp00.nc"
    tempfilename2 = "./temp01.nc"
    loadnetcdf_timed_tofile(infilename1, invarname1, intime1, tempfilename1,
                            outvarname1)
    rotate_netcdf_360_to_180(tempfilename1, outvarname1, tempfilename2,
                             outvarname1)
    gdal_cut_tonc(tempfilename2, outfilename1, lon1, lon2, lat1, lat2)
    return 0


def rotate_netcdf_360_to_180(infilename1, invarname1, outfilename1, outvarname1):
    global cache_lons1
    global cache_lats1
    global cache_z
    gdal_get_nc_to_xyz_in_mem(infilename1, invarname1)
    lonsa = cache_lons1
    latsa = cache_lats1
    pointsz1 = np.array(cache_z)
    nlatsa1 = len(latsa)
    nlonsa1 = len(lonsa)
    pointsz3 = np.reshape(pointsz1, (nlatsa1, nlonsa1))
    # roll the grid by half its width: a 0..360 longitude axis becomes -180..180
    rama1 = int(len(lonsa) / 2)
    pointsz4 = np.roll(pointsz3, rama1, axis=1)
    lonsb = lonsa - 180
    save_points_to_netcdf(outfilename1, outvarname1, lonsb, latsa, pointsz4)
    return 0
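
# Editor's illustration of the longitude rotation above: roll the grid by half
# its width so a 0..360 longitude axis becomes -180..180.
demo_lons360 = np.arange(0, 360, 1.0)                 # 0 .. 359
demo_grid = np.tile(demo_lons360, (5, 1))             # dummy field, 5 latitude rows
demo_half = int(len(demo_lons360) / 2)
demo_rolled = np.roll(demo_grid, demo_half, axis=1)   # 180..359 block moves to the front
demo_lons180 = demo_lons360 - 180                     # -180 .. 179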

def gdal_get_nc_to_xyz_in_mem(inname1, invar1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    imga = loadnetcdf_single_tomem(inname1, invar1)
    lonsa = cache_lons1
    latsa = cache_lats1

    cache_lons1 = []
    cache_lats1 = []
    cache_x = []
    cache_y = []
    cache_z = []
    cache_x2 = []
    cache_y2 = []
    cache_z2 = []

    nlat1 = len(latsa)
    nlon1 = len(lonsa)

    # flatten the grid to x/y/z point lists; masked cells ('--') become 0
    for iy in range(0, nlat1):
        for ix in range(0, nlon1):
            pz1 = imga[iy, ix]
            cache_x.append(lonsa[ix])
            cache_y.append(latsa[iy])
            if str(pz1) == '--':
                cache_z.append(0)
            else:
                cache_z.append(pz1)

    cache_lons1 = lonsa
    cache_lats1 = latsa
    return cache_z

def average_tables(daata1, daata2):
    daata3 = daata1
    pitu = len(daata1)
    for n in range(0, pitu):
        daata3[n] = (daata1[n] + daata2[n]) / 2
    return daata3


def add_scalar(daata, skalar):
    pitu = len(daata)
    for n in range(0, pitu):
        daata[n] = daata[n] + skalar
    return daata


def gam_multiple_vars(x1, y1, x2):
    print("GAM")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = sha1[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    # model = LogisticGAM().fit(x, y)
    model = LinearGAM().fit(x, y)
    y2 = model.predict(xb)
    return y2

def random_forest_multiple_vars(x1, y1, x2):
    print("RF")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = sha1[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    sc = StandardScaler()
    xa = sc.fit_transform(x)
    xba = sc.transform(xb)
    # y.ravel(): RandomForestRegressor expects a 1-D target
    model = RandomForestRegressor(n_estimators=10, max_features=2).fit(xa, y.ravel())
    y2 = model.predict(xba)
    return y2

def linear_regression_multiple_vars(x1, y1, x2):
    print("MLR")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = sha1[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    degree = 3
    polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    model = polyreg.fit(x, y)
    y2 = model.predict(xb)
    return y2

def linear_regression_singe_var(x1, y1, x2):
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = sha1[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    model = LinearRegression().fit(x, y)
    # model = RandomForestRegressor(n_estimators=10, max_features=2).fit(x, y)
    # degree = 2
    # polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    # model = polyreg.fit(x, y)
    y2 = model.predict(xb)
    return y2

def save_points_to_netcdf(outfilename1, outvarname1, xoutlons1, xoutlats1, zoutvalue1):
    nlat1 = len(xoutlats1)
    nlon1 = len(xoutlons1)
    zoutvalue2 = np.array(zoutvalue1).reshape(nlat1, nlon1)
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    ncout1.createDimension('lat', nlat1)
    ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = xoutlats1
    outlons1[:] = xoutlons1
    outvalue1[:, :] = zoutvalue2[:]
    ncout1.close()
    return 0

def gdal_cut_resize_fromnc_tonc(inname1, inname2, outname2, invar2, lon1, lon2, lat1, lat2):
    # use the grid of inname2 as a size template for warping inname1
    imga = loadnetcdf_single_tomem(inname2, invar2)
    dim1 = imga.shape
    height1 = dim1[0]
    width1 = dim1[1]
    print(width1)
    print(height1)
    jono1 = ("gdalwarp -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) +
             " " + str(lat2) + " -ts " + str(width1) + " " + str(height1) +
             " " + inname1 + " " + outname2)
    print(jono1)
    os.system(jono1)
    return

def gdal_get_points_from_file(inname1, invar1, pointsx1, pointsy1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    imga = loadnetcdf_single_tomem(inname1, invar1)
    lonsa = cache_lons1
    latsa = cache_lats1

    cache_lons1 = []
    cache_lats1 = []
    cache_x = []
    cache_y = []
    cache_z = []
    cache_x2 = []
    cache_y2 = []
    cache_z2 = []

    dim1 = imga.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]

    pitu = len(pointsx1)
    for n in range(0, pitu):
        px1 = pointsx1[n]
        py1 = pointsy1[n]
        for iy in range(0, nlat1):
            if py1 >= latsa[iy]:
                for ix in range(0, nlon1):
                    if px1 >= lonsa[ix]:
                        pz1 = imga[iy, ix]
                        cache_x.append(lonsa[ix])
                        cache_y.append(latsa[iy])
                        cache_z.append(pz1)

    cache_lons1 = lonsa
    cache_lats1 = latsa
    return cache_z

def gdal_cut_resize_tonc(inname1, outname1, lon1, lon2, lat1, lat2, width1, height1):
    # gdalwarp -te -5 41 10 51 -ts 1000 0 input.tif output.tif
    jono1 = ("gdalwarp -of netcdf -te " + str(lon1) + " " + str(lat1) + " " +
             str(lon2) + " " + str(lat2) + " -ts " + str(width1) + " " +
             str(height1) + " " + inname1 + " " + outname1)
    print(jono1)
    os.system(jono1)
    return
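
# Editor's note: for the European window used below (lon -20..80, lat 30..70,
# resampled to 1000 x 400 cells), a hypothetical call such as
#   gdal_cut_resize_tonc("input.tif", "europe_input.nc", -20, 80, 30, 70, 1000, 400)
# builds and runs:
#   gdalwarp -of netcdf -te -20 30 80 70 -ts 1000 400 input.tif europe_input.nc
# ("input.tif" / "europe_input.nc" are placeholder names.)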

def interpolate_cache(x_min, y_min, x_max, y_max, reso1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    cache_x2 = []
    cache_y2 = []
    cache_z2 = []
    cache_lons1 = []
    cache_lats1 = []

    pitu1 = len(cache_z)
    raja1 = pitu1

    # keep only unmasked points
    for i in range(0, raja1):
        try:
            xx = cache_x[i]
            yy = cache_y[i]
            zz = cache_z[i]
            if str(zz) == '--':
                raja1 = raja1 - 1
            else:
                cache_x2.append(xx)
                cache_y2.append(yy)
                cache_z2.append(zz)
        except IndexError:
            print("BRK")
            break

    # integer point counts for np.linspace
    lonsn = int((x_max - x_min) / reso1)
    latsn = int((y_max - y_min) / reso1)
    cache_lons1 = np.linspace(x_min, x_max, lonsn)
    cache_lats1 = np.linspace(y_min, y_max, latsn)

    # radial basis function interpolation onto a regular grid
    grid_x, grid_y = np.mgrid[x_min:x_max:reso1, y_min:y_max:reso1]
    rbfi = Rbf(cache_x2, cache_y2, cache_z2)
    di = rbfi(grid_x, grid_y)
    return di.T
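
# Editor's illustration of the RBF pattern used in interpolate_cache():
# interpolate a handful of scattered points onto a regular grid.
demo_px = np.array([0.0, 1.0, 2.0, 3.0])
demo_py = np.array([0.0, 1.0, 0.0, 1.0])
demo_pz = np.array([1.0, 2.0, 3.0, 4.0])
demo_gx, demo_gy = np.mgrid[0.0:3.0:0.5, 0.0:1.0:0.5]
demo_rbf = Rbf(demo_px, demo_py, demo_pz)
demo_grid = demo_rbf(demo_gx, demo_gy)    # interpolated values on the grid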

def makepoints(imgin1, lonsin1, latsin1):
    global cache_x
    global cache_y
    global cache_z
    cache_x = []
    cache_y = []
    cache_z = []
    dim1 = imgin1.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    k = 0
    for iy in range(0, nlat1):
        for ix in range(0, nlon1):
            zz = imgin1[iy, ix]
            cache_x.append(lonsin1[ix])
            cache_y.append(latsin1[iy])
            if str(zz) == '--':
                ## warning: append 0 here to equalize the grid
                cache_z.append(0)
                k = 1
            else:
                cache_z.append(zz)
    return 0

def gdal_reso(inname1, outname1, reso1):
    jono1 = "gdalwarp -tr " + str(reso1) + " " + str(reso1) + " " + inname1 + " " + outname1
    os.system(jono1)
    return 0


def gdal_cut_tonc(inname1, outname1, lon1, lon2, lat1, lat2):
    jono1 = ("gdalwarp -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) +
             " " + str(lat2) + " " + inname1 + " " + outname1)
    print(jono1)
    os.system(jono1)
    return 0

def savenetcdf_single_frommem(outfilename1, outvarname1, xoutvalue1, xoutlats1, xoutlons1):
    nlat1 = len(xoutlats1)
    nlon1 = len(xoutlons1)
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    ncout1.createDimension('lat', nlat1)
    ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = xoutlats1
    outlons1[:] = xoutlons1
    outvalue1[:, :] = xoutvalue1[:]
    ncout1.close()
    return 0


def loadnetcdf_single_tomem(infilename1, invarname1):
    global cache_lons1
    global cache_lats1
    inc1 = nc.Dataset(infilename1)
    inlats1 = inc1["lat"][:]
    inlons1 = inc1["lon"][:]
    cache_lons1 = inlons1
    cache_lats1 = inlats1
    indata1_set1 = inc1[invarname1][:]
    inc1.close()
    return indata1_set1


def loadnetcdf_single_tofile(infilename1, invarname1, outfilename1, outvarname1):
    inc1 = nc.Dataset(infilename1)
    inlats1 = inc1["lat"][:]
    inlons1 = inc1["lon"][:]
    indata1_set1 = inc1[invarname1][:]
    dim1 = indata1_set1.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    ncout1.createDimension('lat', nlat1)
    ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = inlats1
    outlons1[:] = inlons1
    outvalue1[:, :] = indata1_set1[:]
    ncout1.close()
    return 0

def loadnetcdf_timed_tofile(infilename1, invarname1, intime1, outfilename1, outvarname1):
    # pick one time slice from a (time, lat, lon) variable and save it as 2-D
    inc1 = nc.Dataset(infilename1)
    inlats1 = inc1["lat"][:]
    inlons1 = inc1["lon"][:]
    indata1 = inc1[invarname1][:]
    dim1 = indata1.shape
    nlat1 = dim1[1]
    nlon1 = dim1[2]
    indata_set1 = indata1[intime1]
    inc1.close()
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    ncout1.createDimension('lat', nlat1)
    ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = inlats1
    outlons1[:] = inlons1
    outvalue1[:, :] = indata_set1
    ncout1.close()
    return 0
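
# Editor's illustration (safe to run): a round trip through the two NetCDF
# helpers above; writes a small dummy file "roundtrip.nc" to the working directory.
demo_lons = np.linspace(-20, 80, 10)
demo_lats = np.linspace(30, 70, 5)
demo_field = np.zeros((5, 10))
save_points_to_netcdf("roundtrip.nc", "habitat", demo_lons, demo_lats, demo_field)
demo_back = loadnetcdf_single_tomem("roundtrip.nc", "habitat")
print(demo_back.shape)   # (5, 10)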

# main program

input_rasters = 0   # preprocess rasters
calc_species = 1    # human habitation test

# acquire basic data, loadfiles=1
loadfiles = 1

# number of the input time slice
intime1 = 7

# lat, lon rectangle
lat1 = 30
lat2 = 70
lon1 = -20
lon2 = 80
# lat1 = 40
# lat2 = 50
# lon1 = -10
# lon2 = 10

width1 = 1000
height1 = 400

roto = 1

areaname = "europe"

sresultfilename1 = "out.nc"
sresultvarname1 = "habitat"

infilenames1 = []
invarnames1 = []
outfilenames1 = []
outvarnames1 = []
soutfilenames1 = []
soutvarnames1 = []

infilenames1.append('D:/varasto_iceagesimu/grassFrac_Lmon_MPI-ESM-P_lgm_r1i1p1_185001-194912.nc')
# infilenames1.append('D:/razter2/CHELSA_PMIP_CCSM4_BIO_12.tif')  # prec
infilenames1.append('D:/razter2/CHELSA_PMIP_CCSM4_tmean_07.tif')
# infilenames1.append('D:/razter2/CHELSA_PMIP_CCSM4_tmean_01.tif')
infilenames1.append('D:/razter2/CHELSA_PMIP_CCSM4_BIO_01.tif')   # temp
infilenames1.append('./predata/lgm_ice_sheet.nc')                # ice sheet
infilenames1.append('C:/Users/himot/aenvirem1/europa/ptopet.nc')
# infilenames1.append('C:/Users/himot/aenvirem1/europa/gdd5.nc')
# infilenames1.append('D:/razter1/high_longlat.tif')

invarnames1.append("grassFrac")
invarnames1.append("Band1")
invarnames1.append("Band1")
invarnames1.append("Band1")
invarnames1.append("Band1")
invarnames1.append("Band1")
invarnames1.append("Band1")

outvarnames1.append("grassFrac")
outvarnames1.append("bio12")
outvarnames1.append("tmean7")
outvarnames1.append("tmean1")
outvarnames1.append("bio01")
outvarnames1.append("gdd5")
outvarnames1.append("ptopet")

soutvarnames1.append("grassFrac")
soutvarnames1.append("Band1")
soutvarnames1.append("Band1")
soutvarnames1.append("Band1")
soutvarnames1.append("Band1")
soutvarnames1.append("Band1")
soutvarnames1.append("Band1")

if input_rasters == 1:
    os.system("del *.nc")

outfilenames1.append("./" + areaname + "_" + invarnames1[0] + ".nc")
if input_rasters == 1:
    preprocess_small_raster(infilenames1[0], invarnames1[0], intime1,
                            outfilenames1[0], outvarnames1[0],
                            lon1, lon2, lat1, lat2, roto)

huba = len(infilenames1)

for n in range(1, huba):
    outfilenames1.append("./" + areaname + "_" + outvarnames1[n] + ".nc")
    soutfilenames1.append("./" + areaname + "_" + outvarnames1[n] + "_s.nc")
    if input_rasters == 1:
        preprocess_big_raster(infilenames1[n], invarnames1[n], outfilenames1[0],
                              outfilenames1[n], soutfilenames1[n-1], outvarnames1[n],
                              lon1, lon2, lat1, lat2, width1, height1, roto)

imgs = []
pointsx = []
pointsy = []
pointsz = []
mlats = []
mlons = []

spointsx = []
spointsy = []
spointsz = []
simgs = []
slats = []
slons = []

len1 = len(outfilenames1)

for n in range(0, len1):
    imgs.append(loadnetcdf_single_tomem(outfilenames1[n], "Band1"))
    mlons.append(cache_lons1)
    mlats.append(cache_lats1)
    makepoints(imgs[n], mlons[n], mlats[n])
    pointsx.append(cache_x)
    pointsy.append(cache_y)
    pointsz.append(cache_z)

len1 = len(soutfilenames1)

for n in range(0, len1):
    simgs.append(loadnetcdf_single_tomem(soutfilenames1[n], "Band1"))
    slons.append(cache_lons1)
    slats.append(cache_lats1)
    makepoints(simgs[n], slons[n], slats[n])
    spointsx.append(cache_x)
    spointsy.append(cache_y)
    spointsz.append(cache_z)


if calc_species == 1:
    print("Species habitation.")
    load_xy("humanlgm.txt", "Lon", "Lat")
    klats1 = cache_lats1
    klons1 = cache_lons1

    x = np.array(cache_lons1)
    y = np.array(cache_lats1)

    # kernel density estimate of the site locations on the target grid
    xx, yy, zz = kde_raster(x, y, 1, lon1, lat1, lon2, lat2,
                            complex(width1), complex(height1))

    lonsa = xx.T[0]
    latsa = yy[0]

    get_z_points_from_memory(zz, lonsa, latsa, klons1, klats1)
    densitypoints = cache_z

    sampx1 = pointsx[0]
    sampy1 = pointsy[0]

    get_z_points_from_memory(zz, lonsa, latsa, sampx1, sampy1)
    densitypoints = cache_z
    samplex1 = cache_x
    sampley1 = cache_y

    spointszout = []
    ppointsx = []
    ppointsy = []
    ppointsz = []

    len1 = len(infilenames1) - 1
    for n in range(0, len1):
        gdal_get_points_from_file(outfilenames1[n+1], invarnames1[n+1], klons1, klats1)
        ppointsx.append(cache_x)
        ppointsy.append(cache_y)
        ppointsz.append(cache_z)

    sla = []
    len1 = len(pointsz)
    for n in range(0, len1-1):
        sla.append(np.array(pointsz[n-1]))

    slb = []
    for n in range(0, len1-1):
        slb.append(np.array(spointsz[n]))

    apoints1 = list(zip(*sla))
    bpoints1 = densitypoints
    cpoints1 = list(zip(*slb))

    spointszout.append(funk_test_00(apoints1, bpoints1, cpoints1))
    # spointszout.append(probability_density_multi_vars(apoints1, bpoints1, cpoints1))
    # spointszout.append(linear_regression_multiple_vars(apoints1, bpoints1, cpoints1))
    # spointszout.append(random_forest_multiple_vars(apoints1, bpoints1, cpoints1))
    # spointszout.append(gam_multiple_vars(apoints1, bpoints1, cpoints1))
    # sdataout = average_tables(spointszout[1], spointszout[2])

    odex1 = 0
    sdataout00 = spointszout[0]

    # rescale to 0..100, zero out everything below 40, then rescale again
    maxa = np.max(sdataout00)
    mina = np.min(sdataout00)
    deltaa = maxa - mina
    sdataout01 = ((sdataout00 - mina) / deltaa) * 100.0
    sdataout02 = np.where(sdataout01 < 40, 0, sdataout01 - 40)

    maxa = np.max(sdataout02)
    mina = np.min(sdataout02)
    deltaa = maxa - mina
    sdataout = ((sdataout02 - mina) / deltaa) * 100.0

    pointsxout1 = spointsx[0]
    pointsyout1 = spointsy[0]
    slonsout1 = slons[0]
    slatsout1 = slats[0]

    save_points_to_netcdf(sresultfilename1, sresultvarname1, slonsout1, slatsout1, sdataout)

    outzgrid = np.array(sdataout).reshape(height1, width1)

    m = Basemap(projection='cyl', llcrnrlat=lat1, urcrnrlat=lat2,
                llcrnrlon=lon1, urcrnrlon=lon2, resolution='i')
    m.drawcoastlines()
    m.drawcountries()
    m.drawrivers()
    levels = np.linspace(sdataout.min(), sdataout.max(), 25)

    plt.contourf(slonsout1, slatsout1, outzgrid, levels=levels, cmap=plt.cm.Reds)
    plt.show()

    print("Done.")
    quit(-1)

# fallback path: regression test of the downscaler (reached when calc_species == 0)
sla = []
len1 = len(pointsz)

for n in range(1, len1):
    sla.append(pointsz[n])

slb = []
for n in range(0, len1-1):
    print(n)
    slb.append(spointsz[n])

apoints1 = list(zip(*sla))
bpoints1 = pointsz[0]
cpoints1 = list(zip(*slb))

spointszout = []

spointszout.append(linear_regression_multiple_vars(apoints1, bpoints1, cpoints1))
spointszout.append(random_forest_multiple_vars(apoints1, bpoints1, cpoints1))
spointszout.append(gam_multiple_vars(apoints1, bpoints1, cpoints1))

sdataout = average_tables(spointszout[1], spointszout[2])
# sdataout = spointszout[1]

pointsxout1 = spointsx[0]
pointsyout1 = spointsy[0]
slonsout1 = slons[0]
slatsout1 = slats[0]

save_points_to_netcdf(sresultfilename1, sresultvarname1, slonsout1, slatsout1, sdataout)

plt.imshow(np.array(sdataout).reshape(len(slatsout1), len(slonsout1)))
plt.ylim(0, len(slatsout1))
plt.show()
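
Once the script has finished, the resulting habitat grid can be inspected directly from out.nc; a minimal sketch:

import netCDF4 as nc
import matplotlib.pyplot as plt

ds = nc.Dataset("out.nc")     # sresultfilename1 above
hab = ds["habitat"][:]        # sresultvarname1 above, scaled 0..100
ds.close()
plt.imshow(hab, origin="lower", cmap="Reds")
plt.show()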

Sites data (the contents of humanlgm.txt read by load_xy above: semicolon-separated Lon;Lat pairs, one pair per line in the original file)

Lon;Lat 0.248959292267969;48.2382232173701 -0.325470462716793;48.0785621679185 6.02317505852104;49.2951557335836 8.34482865158445;50.153867184884 11.8153417546174;49.0212369201497 13.2873180017658;48.9191096574132 7.59088959816695;47.709455035682 4.82046275902167;46.3720630297676 4.08447463544747;46.0656957116877 4.12636013841508;46.6356511470423 3.9288999101391;45.1447743615906 4.10840920857183;45.2880863575831 3.59381588639798;45.1363329962802 3.51004488046271;45.2206904095848 3.46217573421398;45.2712448690432 2.85184411954267;46.643867684342 2.91168055235358;46.758768203571 2.349218083931;46.9061381888109 4.3357876532533;47.4673148103602 11.5640287368116;45.5568703449381 11.1810755668217;45.4562263075943 3.79127611467397;47.515832480265 3.80922704451727;47.6772343903265 3.51004488046271;47.5481526935534 3.42627387452743;48.086557022435 2.74413854048303;48.3337813981293 19.528257943944;50.6724363128828 19.7915382483121;50.2610955825263 19.9351456870582;50.138529172223 20.1744914183019;49.5287383327041 18.2836601414771;49.9386872085095 17.9366088311738;48.6984324336702 17.2903753568159;48.5718949601122 17.4579173686865;49.5132007330877 17.0869314852588;49.4120859706974 16.6920110287068;49.2483057946455 16.7159456018311;48.9348351606992 14.4660957281408;48.4609142575879 16.249221425906;48.8482836003954 15.6508570977969;48.7221228817887 15.6628243843591;48.5243615709371 16.0338102677867;48.5322869030909 15.8662682559162;48.3814934517334 14.2387172834594;45.866056797224 16.0936467005976;46.3555463342765 18.0802162699199;46.9143135621808 19.3008794992626;46.1486662645181 20.1984259914263;46.3472861139455 20.5574445882917;48.1345000573429 18.7743188905265;47.5077493145738 18.9059590427105;47.8542016173879 18.5708750189694;47.830105173461 29.8919281067941;49.9463888222076 28.312246280586;48.2860246562238 27.1514194840543;48.6510180725924 26.7684663140645;48.7142273050695 26.4812514365721;48.5956449128672 27.259125063114;48.1584548029475 27.0317466184325;48.3337813981293 26.8881391796863;48.2382232173702 26.6607607350048;47.4673148103602 26.3376439978259;47.0041603721743 25.9786254009604;47.142717821753 13.0300213406789;43.5358327352401 15.5670860918616;41.7936282292632 15.9261046887271;40.0760469764719 18.2238237086661;40.1126661462058 18.0802162699199;40.2224053996106 28.0848678359045;44.4313429884533 27.1753540571787;44.7041661480803 24.0399249778868;43.2575742860116 22.7235234560468;43.7436878467773 18.2716928549149;45.1785273250628 18.1280854161687;44.8400978428107 20.7309702434434;41.1299871143924 20.1685077750208;39.727182852891 20.9104795418761;39.727182852891 21.7122877415423;39.7823847172648 20.8506431090652;39.395043345843 23.196231275253;37.5684449734027 11.7794398949308;43.6138626668477 10.8938606893293;43.074272911103 11.8273090411796;42.6532337643238 11.923047333677;42.459298177568 12.4017387961643;42.459298177568 -9.13937701576427;39.422782372328 -8.32560152953586;39.7547893151501 -8.68462012640133;37.2261822618582 -1.48031361596744;37.8524682245273 0.195106502738117;38.8286264581541 -0.187846667251724;39.0891863125127 2.92364783891574;42.1228995812507 3.11512442391066;43.4229443505996 5.46071259009844;43.3359636207116 1.34396601270764;41.7668560048073 0.003629917743197;42.2824702401768 -2.22228538282276;43.4403255224348 -5.74066763210443;43.6485101806209 -8.75642384577443;39.7363862357604 -8.56494726077951;40.0668891054192 -3.61049062403593;40.5596503876726 -4.08918208652323;38.0223591605297 -2.19835080969839;38.5109457081537 -5.76460220522879;36.460019467409 
-5.71673305898006;36.9206385746047 -4.40033153713998;36.7865764545038 -3.80196720903085;36.9397711067791 1.89446119456804;47.9504754788391 2.18167607206042;48.6035590838006 4.76660996949185;44.8485829507466 4.55119881137257;44.0283142843797 2.5406946689259;43.3881670322776 0.745601684598517;43.3359636207116 -0.618668983490293;43.8387144332808 -0.044239228505532;44.8316114850993 1.77478832894622;44.148653977359 0.673797965225421;44.2173095848955 1.5833117439513;43.7350415743018 -1.83933221283291;43.3881670322776 -3.82590178215522;43.5098002904194 -5.04656501149784;43.5098002904194 1.65511546332439;44.6445954277273 1.55937717082693;45.2206904095848 1.91839576769241;45.0349391292476 1.67905003644876;46.5204886010659 1.94233034081677;47.3052654655651 1.24822772021018;47.3377151911363 6.70531039256542;43.5965314198953 7.56695502504257;43.8732320936943 8.04564648752987;44.148653977359 6.1069460644563;43.8041767925782 4.50332966512384;44.4740559570813 4.21611478763146;44.508203839511 -0.403257825371008;45.506570819052 0.649863392101057;44.6275639745576 -0.427192398495373;44.148653977359 1.24822772021018;43.1267008563638 -1.0255567266045;43.7696191725782 -4.71148098775672;43.5271565017702 -4.30459324464252;43.3707708861284 -2.62917312593696;43.4403255224348 -3.94557464777704;43.3881670322776 -5.81247135147752;43.4229443505996 -2.58130397968823;43.2488581222097 -0.451126971619738;45.0856585589659 0.458386807106135;45.1532144771222 1.15248942771272;44.6105275226309 1.17642400083709;45.0856585589659 0.601994245852326;45.4226383073099 0.937078269593437;46.9306605679274 1.24822772021018;46.6849316551859 0.314779368359945;45.6573344459973 1.32003143958328;45.2375468950702 1.20035857396145;44.8994673529734 -5.52525647398514;36.7098640154208 -5.2141070233684;36.8440604383833 -4.92689214587601;36.8823591061342 -4.06524751339887;37.0353617099913 -4.08918208652324;37.5304930021437 -3.51475233153847;37.0926583926782 -3.25147202717046;37.2261822618582 -1.83933221283292;37.3594701352906 -2.0547433709522;37.3404434707763 -2.07867794407657;37.9846395600665 -1.48031361596744;38.2106659393478 -0.738341849112125;38.3797265811846 -0.427192398495378;38.9031698858952 -0.044239228505537;38.9217935285666 -0.379323252246648;39.0891863125127 -8.08625579829221;38.6419261330417 -7.48789147018308;42.6180175802716 -6.91346171519832;42.9517664724742 -2.15048166344966;43.0392960078135 -1.59998648158927;43.1267008563638 -1.48031361596744;43.5791951797614 -0.355388679122282;43.7523328706596 -0.283584959749187;44.0799184183039 1.03281656209089;43.213980992628 1.41576973208073;43.2314220522468 -3.41901403904101;43.3707708861284 -3.6344251971603;43.405558187158 -3.65835977028467;43.561853946595 -6.02788250959681;43.561853946595 -8.85216213827189;39.4782273161934 -9.35478817388356;38.7726676068199 -9.35478817388356;39.1820110748149 -9.33085360075919;39.4597505740167 -9.21118073513737;38.9962392216343 -9.23511530826173;39.2376471801766 -8.58888183390387;39.6074270921491 -8.68462012640133;39.9018369392061 -8.44527439515768;40.194987219913 -2.31802367532022;43.1965349435417 -4.06524751339887;43.5271565017702 -4.49606982963744;43.4229443505996 -5.02263043837348;43.3533697488819 -5.6209947664826;43.405558187158 0.841339977095973;44.9164188221005 1.67905003644875;46.7505691326905 0.817405403971606;46.504016841466 0.793470830847241;46.156956451219 -0.283584959749187;45.8076932469028 0.482321380230495;45.791006703284 0.4105176608574;45.5736268570226 0.027564490867559;45.0180226543298 1.87052662144367;44.8655494172844 
1.70298460957312;45.0349391292476 1.46363887832946;44.9333652920693 1.32003143958327;44.7806670936732 0.147237356489383;44.8655494172844 0.990931059123252;45.6154962703381 1.40978608879964;45.4394348067478 1.63716453348111;45.2880863575831 1.46962252161055;45.2375468950702 1.01486563224762;45.060304468171 1.08666935162071;44.6701332347303 -0.265634029905915;44.8909897437284 -0.205797597095003;44.5934860719867 -0.325470462716828;45.1363329962802 0.524206883198133;45.2038289250683 0.715683468193053;45.2712448690432 0.787487187566148;45.4394348067478 1.03880020537198;45.4730128098952 1.12257121130726;45.3469922011284 0.907160053187974;45.2965052272537 0.141253713208292;45.6322352890653 0.907160053187974;46.6109940485112 0.835356333814879;47.0286378647561 1.06273477849635;46.7833579316585 0.895192766625792;44.508203839511 0.703716181630871;45.0856585589659 0.691748895068688;44.9757095956275 1.0986366381829;45.1532144771222 40.0;54.5 50.0;60.0 60.0;60.0 31.249444;30.064722 23.716667;37.966667 30.523333;50.45 26.255556;47.651389 19.945;50.064722 39.052;51.386 5.37;43.2964 12.5;41.883333 16.366667;48.2 19.051389;47.4925

R script to produce the ptopet data

    1. "R" script: calculate gdd5, pet and ptopet rasters with Envirem package
    2. default: area of france
    3. with envirem
    4. source: chelsa ccsm4 lgm files
    5. version 0001b
    6. 9.10.2010

library(raster)
library(rgeos)
library(maps)
library(viridis)
library(envirem)
library(base)
library(stringr)
library(gtools)
library(RSAGA)   # needed for rsaga.env() and rsaga.wetness.index() below

area_1 <- 2
stage_1 <- 1
stage_2 <- 1
stage_3 <- 1
stage_4 <- 1
rsaaga_1 <- 1

# France
if (area_1 == 1) {
  extent1 <- extent(-10, 10, 40, 50)
  poly <- readWKT("POLYGON((
    -10 50,
    10 50,
    10 40,
    -10 40,
    -10 50
  ))", p4s = CRS("+proj=longlat +datum=WGS84"))
}

# Europe
if (area_1 == 2) {
  extent1 <- extent(-20, 80, 30, 70)
  poly <- readWKT("POLYGON((
    -20 70,
    80 70,
    80 30,
    -20 30,
    -20 70
  ))", p4s = CRS("+proj=longlat +datum=WGS84"))
}

inputDir <- "./climate"
outputDir <- "./snapshot"
processpath2 <- "./snapshot2"
outdir1 <- './envirem1'
dempath1 <- "D:/razter1/high_longlat.tif"

name1 <- './envirem1/test_growingDegDays5.tif'
name2 <- './envirem1/test_annualPET.tif'

outname1 <- "gdd5.nc"
outname2 <- "pet.nc"
outname3 <- "ptopet.nc"
outname4 <- "precipann.nc"
outdpath1 <- "./twi1.nc"

processpath1 <- outputDir
indir1 <- processpath2

Sys.which('gdalinfo')
Sys.which('gdal_translate')

list.files(inputDir)

dir.create(outputDir)
dir.create(processpath2)

# create the selection-rectangle rasters
makerasters <- 1
calculate_average_temp <- 1

if (stage_1 == 1) {

# plot(poly, border = 'blue', xpd = NA, add = TRUE)
# crop and mask the input rasters

files <- list.files(inputDir, pattern = '.tif$', full.names = TRUE)

# read in a precipitation raster, crop/mask it, and keep it as a mask layer
## precipitation rasters have the term "_prec_" in the file name
## crop/mask using the polygon

grep(files, pattern = '_prec_', value = TRUE)[1]

precipRaster <- raster(grep(files, pattern = '_prec_', value = TRUE)[1])

# precipRaster <- crop(precipRaster, poly)
precipRaster <- mask(precipRaster, poly)
precipRaster <- crop(precipRaster, extent1)
# precipRaster <- mask(precipRaster, extent1)

# create a terrestrial/marine mask: set values < 0 to NA and values >= 0 to 1
precipRaster[precipRaster < 0] <- NA
precipRaster[!is.na(precipRaster)] <- 1

plot(precipRaster, col = 'blue', legend = FALSE)
title(main = 'terrestrial mask')

# supply additional flags to writeRaster() for compression and smaller files
tifOptions <- c("COMPRESS=DEFLATE", "PREDICTOR=2", "ZLEVEL=6")

for (i in 1:length(files)) {

 cat(i, ' ')
 r <- raster(files[i])
 #r <- crop(r, poly)
 r <- crop(r, extent1)
 # apply terrestrial mask
 r <- mask(r, precipRaster)
 
 # if temperature: divide values by 10, then shift by -273.15
 ## temperature files are recognised by the terms tmin or tmax in the name
 if (grepl('tmin|tmax', files[i])) {

# print("t ")

   r <- r / 10
   r <- r-273.15
 }
 
 r <- aggregate(r, fact = 2)
 
 outfile <- paste0(outputDir, '/', names(r), '.tif')
 writeRaster(r, filename = outfile, format = 'GTiff', options = tifOptions, overwrite = TRUE)

}

}

if (stage_2 == 1) {

old_files <- list.files(processpath1, pattern = "*.tif", full.names = TRUE)
old_files_2 <- mixedsort(old_files)

len1 <- length(old_files)
print(len1)

# copy the precipitation files to sequentially numbered names: test_precip_01.tif, ...
precipfiles <- grep('prec', old_files_2, value = TRUE)
print(precipfiles)

len2 <- length(precipfiles)

for (n in 1:len2) {
  infilee <- precipfiles[n]
  a <- sprintf("%02d", n)
  newfileename <- paste0(processpath2, "/test_precip_", a, ".tif")
  print(newfileename)
  file.copy(from = infilee, to = newfileename)
}

tminfiles <- grep('tmin', old_files_2, value = TRUE)
print(tminfiles)

len2 <- length(tminfiles)

for (n in 1:len2) {
  infilee <- tminfiles[n]
  a <- sprintf("%02d", n)
  newfileename <- paste0(processpath2, "/test_tmin_", a, ".tif")
  print(newfileename)
  file.copy(from = infilee, to = newfileename)
}

tmaxfiles <- grep('tmax', old_files_2, value = TRUE)
print(tmaxfiles)

len2 <- length(tmaxfiles)

for (n in 1:len2) {
  infilee <- tmaxfiles[n]
  a <- sprintf("%02d", n)
  newfileename <- paste0(processpath2, "/test_tmax_", a, ".tif")
  print(newfileename)
  file.copy(from = infilee, to = newfileename)
}

rasterTemplate <- raster('./snapshot2/test_precip_01.tif')

# monthly extraterrestrial solar radiation rasters for the LGM (year -25000)
ETsolradRasters(rasterTemplate = rasterTemplate, year = -25000,
                outputDir = processpath2, overwrite = TRUE)

}

if (stage_3 == 1) {

old_files <- list.files(processpath1, pattern = "*.tif", full.names = TRUE)
old_files_2 <- mixedsort(old_files)

tmaxfiles <- grep('tmin', old_files_2, value = TRUE)
print(tmaxfiles)

len2 <- length(tmaxfiles)

print("Tmean ..")

# tmean = (tmax + tmin) / 2, month by month
for (n in 1:len2) {
  a <- sprintf("%02d", n)
  infileename1 <- paste0(processpath2, "/test_tmax_", a, ".tif")
  infileename2 <- paste0(processpath2, "/test_tmin_", a, ".tif")
  outfileename1 <- paste0(processpath2, "/test_tmean_", a, ".tif")
  print(outfileename1)
  r1 <- raster(infileename1)
  r2 <- raster(infileename2)
  r3 <- (r1 + r2) / 2
  writeRaster(r3, filename = outfileename1, format = 'GTiff', overwrite = TRUE)
}

}

if (stage_4 == 1) {

dir.create(outdir1)

assignNames(reset = TRUE)
assignNames(tmax = "test_tmax_##",
            tmin = "test_tmin_##",
            tmean = "test_tmean_##",
            precip = "test_precip_##",
            solrad = "et_solrad_##")

chelsaFiles <- list.files(processpath2, pattern = 'test', full.names = TRUE)
chelsaStack <- stack(chelsaFiles)
solarFiles <- list.files(processpath2, pattern = 'solrad', full.names = TRUE)
solarStack <- stack(solarFiles)
print(solarFiles)

verifyFileStructure(processpath2, returnFileNames = FALSE)
verifyRasterNames(chelsaStack, solradstack = solarStack)
varnames()

inputDir <- indir1
outputDir <- outdir1

print("Generating rasters")
generateRasters(var = 'growingDegDays5', prefix = 'test_', maindir = indir1, outputDir = outdir1)


# print("PET")
generateRasters(var = 'annualPET', prefix = 'test_', maindir = indir1, outputDir = outdir1)


# read in the resulting rasters and export them as NetCDF
result <- raster(name1)
plot(result, col = inferno(100))
writeRaster(result, filename = outname1, format = 'netCDF', overwrite = TRUE)

result <- raster(name2)
plot(result, col = inferno(100))
writeRaster(result, filename = outname2, format = 'netCDF', overwrite = TRUE)

print(" Prec ...")
rasterFiles <- list.files(indir1, pattern = '.tif$', full.names = TRUE)

env <- stack(rasterFiles)
precip <- grep('prec', names(env), value = TRUE)
precip <- stack(env[[precip]])

annualprecip <- sum(precip) / 10

pet <- raster(outname2)

# moisture index: annual precipitation over annual PET
ptopet <- annualprecip / pet

plot(annualprecip, col = inferno(100))
plot(ptopet, col = inferno(100))

writeRaster(annualprecip, filename = outname4, format = 'netCDF', overwrite = TRUE)
writeRaster(ptopet, filename = outname3, format = 'netCDF', overwrite = TRUE)

}

if (rsaaga_1 == 1) {

# topographic wetness index from the DEM via SAGA
e1 <- extent1
elev0 <- raster(dempath1)
alt <- crop(elev0, e1)
plot(alt)

tempInfile <- 'saga_temp_twi_in'
writeRaster(alt, filename = tempInfile, format = 'SAGA', overwrite = TRUE)

env1 <- rsaga.env()

outfile <- 'saga_temp_twi_out.sgrd'
call <- rsaga.wetness.index(in.dem = 'saga_temp_twi_in.sgrd', out.wetness.index = outfile, env = env1)

twi1 <- raster('saga_temp_twi_out.sdat')

plot(alt)
plot(twi1)

writeRaster(twi1, filename = outdpath1, format = 'netCDF', overwrite = TRUE)

}

Licensing

I, the copyright holder of this work, hereby publish it under the following license:
This file is licensed under the Creative Commons Attribution-Share Alike 4.0 International license.
You are free:
  • to share – to copy, distribute and transmit the work
  • to remix – to adapt the work
Under the following conditions:
  • attribution – You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
  • share alike – If you remix, transform, or build upon the material, you must distribute your contributions under the same or compatible license as the original.

File history

Click on a date/time to view the file as it appeared at that time.

Date/Time: 13:45, 20 October 2020 (current version)
Dimensions: 1,200 × 795 (1.74 MB)
User: Merikanto (talk | contribs)
Comment: Uploaded own work with UploadWizard

There are no pages that use this file.
