File:Beringia 20000bp january depth of snow cm 1.png

From Wikimedia Commons, the free media repository
Jump to navigation Jump to search

Original file(1,472 × 976 pixels, file size: 574 KB, MIME type: image/png)

Captions

Captions

Depth of snow in cm, East Beringia, 20 000 years ago

Summary[edit]

Description
English: Depth of snow in cm, East Beringia, 20 000 years ago
Date
Source Own work
Author Merikanto
Camera location65° 00′ 00″ N, 170° 00′ 00″ W  Heading=180° Kartographer map based on OpenStreetMap.View this and other nearby images on: OpenStreetMapinfo

This image is based on HADCM3 60 ka simulation and CHELSA Trace bioclimatic variables.

Python "Random forest" downscaling against bio11 and bio19, Panoply visualization.

https://chelsa-climate.org/

Karger, D.N., Conrad, O., Böhner, J., Kawohl, T., Kreft, H., Soria-Auza, R.W., Zimmermann, N.E., Linder, P., Kessler, M. (2017). Climatologies at high resolution for the Earth land surface areas. Scientific Data. 4 170122. https://doi.org/10.1038/sdata.2017.122

Karger, D.N., Conrad, O., Böhner, J., Kawohl, T., Kreft, H., Soria-Auza, R.W., Zimmermann, N.E., Linder, H.P., Kessler, M. Data from: Climatologies at high resolution for the earth's land surface areas. Dryad Digital Repository.

Armstrong et al. (2019): A simulated Northern Hemisphere land-based climate dataset for the past 60,000 years.

https://www.paleo.bristol.ac.uk/ummodel/scripts/papers/ https://www.nature.com/articles/s41597-019-0277-1

https://data.ceda.ac.uk/badc/deposited2018/HadCM3B_60Kyr_Climate/data/snow

Code to downscale.

    1. hadcm3 60ka chelsa trace downscaler
    2. 14.5.2021, v 0010

import matplotlib.pyplot as plt import numpy as np

import os

import netCDF4 as nc

from osgeo import gdal from osgeo import osr from osgeo import gdalconst

from mpl_toolkits.basemap import shiftgrid

from sklearn.linear_model import LinearRegression from sklearn.preprocessing import PolynomialFeatures from sklearn.pipeline import make_pipeline from sklearn.ensemble import RandomForestRegressor from sklearn.preprocessing import StandardScaler from sklearn.model_selection import train_test_split from sklearn import svm, metrics from pygam import LogisticGAM from pygam import LinearGAM from scipy import exp, mgrid, signal, row_stack, column_stack, tile, misc


def process_raster(iname1, oname1, lon1, lat1, lon2, lat2, width1, height1):
    """Crop, resample and export one input raster into ./work.

    Produces, under ./work/: a cropped GeoTIFF ("big_<oname1>_r.tif"), a
    bilinearly resampled "big" GeoTIFF/NetCDF pair at width1 x height1, and
    a "midi" GeoTIFF/NetCDF pair regridded to the resolution of
    ./work/smallfile.tif.  Returns an empty tuple (as in the original).
    """
    bigrname1 = "./work/big_" + oname1 + "_r.tif"
    bigname1 = "./work/big_" + oname1 + ".tif"
    bigname2 = "./work/big_" + oname1 + ".nc"
    midiname1 = "./work/midi_" + oname1 + ".tif"
    midiname2 = "./work/midi_" + oname1 + ".nc"
    # projWin order is [ulx, uly, lrx, lry] - hence lat2 (north) before lat1.
    ds = gdal.Translate(bigrname1, iname1, projWin=[lon1, lat2, lon2, lat1])
    ds = 0
    ds = gdal.Warp(bigname1, bigrname1, width=width1, height=height1,
                   resampleAlg='bilinear')
    ds = 0
    raster_reso_to_reference_raster(bigrname1, "./work/smallfile.tif", midiname1)
    ds = gdal.Translate(midiname2, midiname1, format='NetCDF')
    ds = 0
    ds = gdal.Translate(bigname2, bigname1, format='NetCDF')
    ds = 0
    return ()


def sea_proximity(srcname, dstname): src_ds = gdal.Open(srcname) srcband=src_ds.GetRasterBand(1) drv = gdal.GetDriverByName('GTiff') dst_ds = drv.Create(dstname,src_ds.RasterXSize, src_ds.RasterYSize, 1,gdal.GetDataTypeByName('Float32')) dst_ds.SetGeoTransform( src_ds.GetGeoTransform() ) dst_ds.SetProjection( src_ds.GetProjectionRef() ) dstband = dst_ds.GetRasterBand(1) gdal.ComputeProximity(srcband,dstband,["VALUES='0,-1'","DISTUNITS=GEO"]) srcband = None dstband = None src_ds = None dst_ds = None return(0)


def create_landsea_x(neofile1, neofile2):
    """Derive a binary land(1)/sea(0) mask from a DEM raster.

    Reads *neofile1* (elevation), clamps negative values to 0 and positive
    values to 1, then writes the mask both as GeoTIFF (*neofile2*) and as a
    NetCDF sibling (same name with .nc extension).
    """
    print(neofile1)
    array1, lats, lons = load_raster_float32(neofile1)
    array1[array1 < 0.0] = 0.0
    array1[array1 > 0.0] = 1.0
    array2 = array1.astype(np.float32)
    x_save_raster(neofile2, "GTiff", array2, lons, lats)
    neofile3 = neofile2.replace(".tif", ".nc")
    x_save_raster(neofile3, "NetCDF", array2, lons, lats)
    return (0)


def x_save_raster(outname1, rastype1, rasdata1, lonz1, latz1):
    """Write a 2-D array as a single-band Float32 raster in EPSG:4326.

    The geotransform is derived from the min/max of the *lonz1*/*latz1*
    coordinate vectors; *rastype1* is a GDAL driver name ("GTiff"/"NetCDF").
    """
    xmin = np.min(lonz1)
    xmax = np.max(lonz1)
    ymax = np.max(latz1)
    ymin = np.min(latz1)
    nx = len(lonz1)
    ny = len(latz1)
    xres = (xmax - xmin) / float(nx)
    yres = (ymax - ymin) / float(ny)
    # North-up raster: origin at ymax with a negative y pixel size.
    geotransform = (xmin, xres, 0, ymax, 0, -yres)
    dst_ds = gdal.GetDriverByName(rastype1).Create(outname1, nx, ny, 1,
                                                   gdal.GDT_Float32)
    dst_ds.SetGeoTransform(geotransform)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)  # WGS84 lat/lon
    dst_ds.SetProjection(srs.ExportToWkt())
    dst_ds.GetRasterBand(1).WriteArray(rasdata1)
    dst_ds.FlushCache()  # write to disk
    dst_ds = None
    return (0)


def sea_proximity(srcname, dstname): src_ds = gdal.Open(srcname) srcband=src_ds.GetRasterBand(1) drv = gdal.GetDriverByName('GTiff') dst_ds = drv.Create(dstname,src_ds.RasterXSize, src_ds.RasterYSize, 1,gdal.GetDataTypeByName('Float32')) dst_ds.SetGeoTransform( src_ds.GetGeoTransform() ) dst_ds.SetProjection( src_ds.GetProjectionRef() ) dstband = dst_ds.GetRasterBand(1) gdal.ComputeProximity(srcband,dstband,["VALUES='0,-1'","DISTUNITS=GEO"]) srcband = None dstband = None src_ds = None dst_ds = None return(0)

def rasterize_shapefile(shpath1, outfile1, lon1, lat1, lon2, lat2, cols, rows):
    """Rasterize a shapefile to a Float32 grid via external GDAL tools.

    Burns 255 into covered cells with gdal_rasterize, then remaps through
    gdal_calc.py so that uncovered cells end up as 1 in *outfile1*; finally
    exports a NetCDF copy (into the temp .tif name, as in the original).
    SECURITY NOTE: paths and numbers are interpolated into shell commands
    unquoted via os.system - do not pass untrusted input.
    """
    tempofile1 = "temp1.tif"
    kom1 = "gdal_rasterize -burn 255 -ot Float32"
    kom2 = " -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) + " " + str(lat2)
    kom3 = " -ts " + str(cols) + " " + str(rows)
    kom4 = " " + shpath1 + " " + tempofile1
    kommandoo1 = kom1 + kom2 + kom3 + kom4
    koma2 = ("gdal_calc.py -A " + tempofile1 +
             " --calc=\"(A>200)*99\" --outfile=temp2.tif")
    koma3 = "gdal_calc.py -A temp2.tif --calc=\"(A<1)*1\" --outfile=" + outfile1
    # NOTE(review): the NetCDF copy is written over the temp .tif filename -
    # presumably intentional scratch reuse; verify if the .nc is needed.
    kommandoo2 = ("gdal_translate -of netcdf -ot Float32 " + outfile1 +
                  " " + tempofile1)
    print(kommandoo1)
    os.system(kommandoo1)
    os.system(koma2)
    os.system(koma3)
    os.system(kommandoo2)
    return (0)


def river_proximity(srcname, dstname):
    """Compute geographic distance-to-river from a rasterized river mask.

    Same mechanics as sea_proximity: pixels valued 0 or -1 are the
    proximity targets; the distance (geographic units) is written to
    *dstname* as a Float32 GeoTIFF with the source's georeferencing.
    """
    src_ds = gdal.Open(srcname)
    srcband = src_ds.GetRasterBand(1)
    drv = gdal.GetDriverByName('GTiff')
    dst_ds = drv.Create(dstname, src_ds.RasterXSize, src_ds.RasterYSize, 1,
                        gdal.GetDataTypeByName('Float32'))
    dst_ds.SetGeoTransform(src_ds.GetGeoTransform())
    dst_ds.SetProjection(src_ds.GetProjectionRef())
    dstband = dst_ds.GetRasterBand(1)
    gdal.ComputeProximity(srcband, dstband, ["VALUES='0,-1'", "DISTUNITS=GEO"])
    # Drop all GDAL handles so the result is flushed to disk.
    srcband = None
    dstband = None
    src_ds = None
    dst_ds = None
    return (0)




def gaussian_blur_sub(in_array, size): ny, nx = in_array.shape ary = row_stack((tile(in_array[0,:], size).reshape((size, nx)),in_array,tile(in_array[-1,:], size).reshape((size, nx)))) arx = column_stack((tile(ary[:,0], size).reshape((size, ny + size*2)).T,ary,tile(ary[:,-1], size).reshape((size, ny + size*2)).T)) x, y = mgrid[-size:size+1, -size:size+1] g = exp(-(x**2/float(size) + y**2/float(size))) g = (g / g.sum()) #.astype(in_array.dtype) return signal.convolve(arx, g, mode='valid')

def gaussian_blur(src1, dst1, size1): source = gdal.Open(src1) gt = source.GetGeoTransform() proj = source.GetProjection() band_array = source.GetRasterBand(1).ReadAsArray() blurredArray = gaussian_blur_sub(band_array, size1) #driver = gdal.GetDriverByName('GTIFF') driver = gdal.GetDriverByName('NetCDF') driver.Register() cols = source.RasterXSize rows = source.RasterYSize bands = source.RasterCount band = source.GetRasterBand(1) datatype = band.DataType output = driver.Create(dst1, cols, rows, bands, datatype) output.SetGeoTransform(gt) output.SetProjection(proj) outBand = output.GetRasterBand(1) outBand.WriteArray(blurredArray, 0, 0) source = None # close raster output = None band = None outBand = None return(0)


def filter_nas_out_from_small_and_midi(smal, midis):
    """Mask the target grid and the predictor stack with the target's fill mask.

    Builds a multiplier grid that is 1 where *smal* is valid and equal to
    the NetCDF default fill value (9.96920997e+36) where it is missing,
    then multiplies both the target and every predictor in *midis* by it.
    Masked cells therefore stay huge and are caught by the later
    fill-value clamping in the main script.

    Returns (masked_target, list_of_masked_predictors).
    """
    num1 = len(midis)
    print("")
    print("staksize1 : ", num1)
    print("")
    # 1 where valid; the fill value itself where missing.
    smak = np.where(smal == 9.96920997e+36, smal, smal * 0 + 1)
    smali = smak * smal
    meduza = []
    for n in range(0, num1):
        print(n)
        meduza.append(smak * midis[n])
    print(smali)
    return (smali, meduza)





def rm_create_dir(path1):
    """Remove *path1* if it is an empty directory, then (re)create it.

    NOTE(review): os.rmdir only removes empty directories; a non-empty
    directory survives and the subsequent mkdir then fails, reported only
    by a printed message (behavior kept from the original).
    """
    try:
        os.rmdir(path1)
    except OSError:
        print(" ")
    try:
        os.mkdir(path1)
    except OSError:
        print("Create dir %s failed" % path1)
    else:
        print("Created dir %s " % path1)
    return (0)

def create_dirs():
    """Create (or reset) the ./work scratch directory used by the raster helpers."""
    rm_create_dir("work")
    return (0)




def random_forest_multiple_vars(x1, y1, x2):
    """Random-forest downscaling: fit on (x1 -> y1), predict at x2.

    *x1*/*x2* are sequences of predictor tuples (one tuple per pixel),
    *y1* the target values.  Predictors are standardised with a
    StandardScaler fit on x1 before training and prediction.
    Returns the predictions for x2 as a 1-D array.
    """
    print("RF /2")
    xa1 = np.array(x1).astype(float)
    ya1 = np.array(y1).astype(float)
    xa2 = np.array(x2).astype(float)
    dim2 = np.shape(x1)[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.ravel()
    xb = xa2.reshape((-1, dim2))
    sc = StandardScaler()
    xa = sc.fit_transform(x)
    xba = sc.transform(xb)
    # NOTE(review): n_estimators=10 / max_features=2 are hard-coded and the
    # forest is unseeded, so results vary from run to run - confirm intended.
    model = RandomForestRegressor(n_estimators=10, max_features=2).fit(xa, y)
    y2 = model.predict(xba)
    return (y2)


def rotate_to_180(iname1, oname1):
    """Shift a 0..360-longitude raster onto a -180..180 grid.

    Uses basemap's shiftgrid on the HadCM3 grid (cut at 179.75), shifts the
    longitudes down by 360, and writes the result to *oname1* (GTiff) plus
    a NetCDF copy named output.nc.
    """
    r1, la1, lo1 = load_raster_float32(iname1)
    # HadCM3 grid!
    r2, lo2 = shiftgrid(179.75, r1, lo1, start=True)
    lo2 = lo2 - 360.0
    cols = len(lo2)
    rows = len(la1)
    lon1 = min(lo2)
    lon2 = max(lo2)
    lat1 = min(la1)
    lat2 = max(la1)
    save_raster_compress(oname1, "GTiff", r2, lon1, lat1, lon2, lat2, cols, rows)
    save_raster_compress("output.nc", "NetCDF", r2, lon1, lat1, lon2, lat2, cols, rows)
    return (0)

def rotate_to_360(iname1, oname1):
    """Shift a raster's longitudes onto a 0..360 grid.

    Uses basemap's shiftgrid with a cut at 0.0 on the HadCM3 grid and
    writes the result to *oname1* (GTiff) plus a NetCDF copy r360.nc.
    """
    r1, la1, lo1 = load_raster_float32(iname1)
    # HadCM3 grid!
    r2, lo2 = shiftgrid(0.0, r1, lo1, start=True)
    cols = len(lo2)
    rows = len(la1)
    lon1 = min(lo2)
    lon2 = max(lo2)
    lat1 = min(la1)
    lat2 = max(la1)
    save_raster_compress(oname1, "GTiff", r2, lon1, lat1, lon2, lat2, cols, rows)
    save_raster_compress("r360.nc", "NetCDF", r2, lon1, lat1, lon2, lat2, cols, rows)
    return (0)


def linear_regression_multiple_vars(x1, y1, x2):
    """Degree-3 polynomial regression: fit on (x1 -> y1), predict at x2.

    *x1*/*x2* are sequences of predictor tuples, *y1* the target values.
    Returns the predictions for x2.
    """
    print("MLR")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    dim2 = np.shape(x1)[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    degree = 3
    polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    model = polyreg.fit(x, y)
    y2 = model.predict(xb)
    return (y2)

def gam_multiple_vars(x1, y1, x2):
    """Linear GAM (pygam): fit on (x1 -> y1), predict at x2.

    *x1*/*x2* are sequences of predictor tuples, *y1* the target values.
    Returns the predictions for x2.
    """
    print("GAM")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    dim2 = np.shape(x1)[1]
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    model = LinearGAM().fit(x, y)
    y2 = model.predict(xb)
    return (y2)


def linear_regression_singe_var(x1, y1, x2):
    """Single-predictor fit of (x1 -> y1), evaluated at x2.

    NOTE(review): despite the name, the active model is a pygam LinearGAM;
    the linear/polynomial-regression variants were commented out in the
    original (an unused degree-2 pipeline was also built and is removed
    here).  Name kept for caller compatibility.
    """
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    x = xa1.reshape((-1, 1))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, 1))
    model = LinearGAM().fit(x, y)
    y2 = model.predict(xb)
    return (y2)

def load_raster_float32(filename):
    """Load a raster as a float array plus lon/lat coordinate vectors.

    Returns (array, lats, lons); the axes are reconstructed from the
    geotransform (origin + pixel size), so lats descend for a north-up
    raster, matching row order.
    """
    print(filename)
    raster = gdal.Open(filename)
    print(raster)
    geotransform = raster.GetGeoTransform()
    originX = geotransform[0]
    originY = geotransform[3]
    pixelWidth = geotransform[1]
    pixelHeight = geotransform[5]
    array = raster.ReadAsArray().astype(float)
    cols = array.shape[1]
    rows = array.shape[0]
    limitX = cols * pixelWidth + originX
    limitY = rows * pixelHeight + originY
    lons = np.array(np.arange(originX, limitX, pixelWidth))
    lats = np.array(np.arange(originY, limitY, pixelHeight))
    return (array, lats, lons)

def raster_reso_to_reference_raster(inputfile, referencefile, outputfile): print(inputfile, referencefile, outputfile) input = gdal.Open(inputfile, gdalconst.GA_ReadOnly) inputProj = input.GetProjection() inputTrans = input.GetGeoTransform() reference = gdal.Open(referencefile) referenceProj = reference.GetProjection() referenceTrans = reference.GetGeoTransform() bandreference = reference.GetRasterBand(1) x = reference.RasterXSize y = reference.RasterYSize driver= gdal.GetDriverByName('GTiff') output = driver.Create(outputfile,x,y,1,bandreference.DataType) output.SetGeoTransform(referenceTrans) output.SetProjection(referenceProj) gdal.ReprojectImage(input,output,inputProj,referenceProj,gdalconst.GRA_Bilinear) del output return(0)

def save_raster_compress(outname1, rastype1, rasdata1, lon1, lat1, lon2, lat2,
                         cols, rows):
    """Write a 2-D array as a single-band Float32 raster over a given bbox.

    Like x_save_raster but takes explicit corner coordinates and grid size.
    NOTE(review): despite the name, the DEFLATE creation option was left
    commented out in the original, so no compression is applied.
    """
    nx = cols
    ny = rows
    xres = (lon2 - lon1) / float(nx)
    yres = (lat2 - lat1) / float(ny)
    # North-up raster: origin at the northern edge, negative y pixel size.
    geotransform = (lon1, xres, 0, lat2, 0, -yres)
    dst_ds = gdal.GetDriverByName(rastype1).Create(outname1, nx, ny, 1,
                                                   gdal.GDT_Float32)
    dst_ds.SetGeoTransform(geotransform)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)  # WGS84 lat/lon
    dst_ds.SetProjection(srs.ExportToWkt())
    dst_ds.GetRasterBand(1).WriteArray(rasdata1)
    dst_ds.FlushCache()  # write to disk
    dst_ds = None
    return (0)


def load_hadcm3_slice(hadpath1,inf1,vari1, taimi1): ind1 = nc.Dataset(inf1) #print(ind1) hlons1=ind1['lon'][:] hlats1=ind1['lat'][:] hslice1=ind1[vari1][taimi1] return(hlons1, hlats1,hslice1)


def load_hadcm3_avg(hadpath1, inf1, vari1, taimi1):
    """Average *vari1* over 32 same-calendar-month slices (one per year).

    Starts at time index *taimi1* and steps 12 indices per year.
    Returns (lons, lats, mean_slice).

    Bug fix: the original called nc.Dataset(inf2), but `inf2` is not
    defined in this function - the file argument is *inf1*, so the call
    raised NameError (or silently used a stale global).
    """
    slicesize = 32
    ind1 = nc.Dataset(inf1)  # was nc.Dataset(inf2)
    hlons1 = ind1['lon'][:]
    hlats1 = ind1['lat'][:]
    hslices = []
    for n in range(0, slicesize):
        hslices.append(ind1[vari1][taimi1 + 12 * n])
    hslice = hslices[0] * 0.0
    for n in range(0, slicesize):
        hslice = hslice + hslices[n]
    hslice = hslice / float(slicesize)
    print(hslice)
    return (hlons1, hlats1, hslice)


def process_hadcm3_slice(year1, month1, lon1, lat1, lon2, lat2):
    """Extract one HadCM3 snow slice and build the small target rasters.

    Computes the time index from *year1*/*month1*, loads the slice, flips
    it north-up, writes 0..360 rasters, rotates to -180..180 and crops the
    study window into ./work/smallfile.{tif,nc}.
    Returns (slice2d, lons, lats, cols, rows).
    """
    baseyear1 = 22500  # CURRENT (0 yr) run would use baseyear1 = 2500
    vari1 = 'lwe_thickness_of_surface_snow_amount'
    inf1 = 'regrid_lwe_thickness_of_surface_snow_amount_20_22.5kyr.nc'
    # CURRENT run: inf1 = 'regrid_lwe_thickness_of_surface_snow_amount_0_2.5kyr.nc'
    hadpath1 = './haddaway/'
    taimi1 = (baseyear1 - year1) * 12 + month1
    inf2 = hadpath1 + inf1
    hlons1, hlats1, hslice1 = load_hadcm3_slice(hadpath1, inf2, vari1, taimi1)
    zhape1 = np.shape(hslice1)
    cols1 = zhape1[1]
    rows1 = zhape1[0]
    hslice2 = np.flipud(hslice1)
    save_raster_compress("./work/s360.nc", "NetCDF", hslice2,
                         0, 0, 360, 89.5, cols1, rows1)
    save_raster_compress("./work/s360.tif", "GTiff", hslice2,
                         0, 0, 360, 89.5, cols1, rows1)
    rotate_to_180("./work/s360.tif", "./work/s180.tif")
    ds = gdal.Translate("./work/smallfile.tif", "./work/s180.tif",
                        projWin=[lon1, lat2, lon2, lat1])
    ds = gdal.Translate("./work/smallfile.nc", "./work/smallfile.tif",
                        format='NetCDF')
    ds = 0
    return (hslice2, hlons1, hlats1, cols1, rows1)


def process_big_rasters(inames1, lon1, lat1, lon2, lat2, width1, height1):
    """Crop/resample every predictor raster and reload the results as arrays.

    For each input: crops to the study window, warps to width1 x height1
    ("big"), regrids to the resolution of ./work/smallfile.tif ("midi"),
    and exports NetCDF copies; then reloads the big and midi arrays.
    Returns (bigstak, midistak, big_lats, big_lons, midi_lats, midi_lons).
    NOTE(review): the returned coordinate vectors are undefined when
    *inames1* is empty.
    """
    num1 = len(inames1)
    for n in range(0, num1):
        inam1 = inames1[n]
        bigrname1 = "./work/bigfile_r_" + str(n) + ".tif"
        bigname1 = "./work/bigfile_" + str(n) + ".tif"
        bigname2 = "./work/bigfile_" + str(n) + ".nc"
        midiname1 = "./work/midifile_" + str(n) + ".tif"
        midiname2 = "./work/midifile_" + str(n) + ".nc"
        ds = gdal.Translate(bigrname1, inam1, projWin=[lon1, lat2, lon2, lat1])
        ds = 0
        ds = gdal.Warp(bigname1, bigrname1, width=width1, height=height1,
                       resampleAlg='bilinear')
        ds = 0
        raster_reso_to_reference_raster(bigrname1, "./work/smallfile.tif",
                                        midiname1)
        ds = gdal.Translate(midiname2, midiname1, format='NetCDF')
        ds = 0
        ds = gdal.Translate(bigname2, bigname1, format='NetCDF')
        ds = 0
    bigstak = []
    midistak = []
    for n in range(0, num1):
        bigname1 = "./work/bigfile_" + str(n) + ".tif"
        midiname1 = "./work/midifile_" + str(n) + ".tif"
        m1, midila, midilo = load_raster_float32(midiname1)
        b1, bigla, biglo = load_raster_float32(bigname1)
        bigstak.append(b1)
        midistak.append(m1)
    return (bigstak, midistak, bigla, biglo, midila, midilo)



    1. half euro
    2. 360
  1. lon1=0
  2. lat1=40
  3. lon2=100
  4. lat2=80
    1. beringia

lon1=-179.5 lat1=50 lon2=-119.5 lat2=80

    1. siberia
  1. lon1=90
  2. lat1=50
  3. lon2=180
  4. lat2=80
  1. width1=2400
  2. height1=1200

width1=1000 height1=500

  1. width1=400
  2. height1=200

month1=1

year1=20050

      1. CURRENT year1=150

create_dirs()


  1. plt.imshow(loz1)
  1. plt.show()
  1. quit(-1)
    1. WARNING 0YR experiment, not 20000 YR

hslice2,hlons1,hlats1, cols1, rows1=process_hadcm3_slice(year1, month1, lon1, lat1, lon2, lat2)


smallname = "./work/smallfile.tif"

process_raster("./haddaway/CHELSA_TraCE21k_dem_-200_V1.0.tif","dem",lon1, lat1, lon2, lat2, width1, height1) process_raster("./haddaway/CHELSA_TraCE21k_glz_-200_V1.0.tif","glz",lon1, lat1, lon2, lat2, width1, height1)

create_landsea_x("./work/big_dem.tif", "./work/big_landsea.tif") sea_proximity("./work/big_landsea.tif", "./work/big_distcoast.tif")

create_landsea_x("./work/midi_dem.tif", "./work/midi_landsea.tif") sea_proximity("./work/midi_landsea.tif", "./work/midi_distcoast.tif")


rawnames1=[]

rawnames1.append("./haddaway/CHELSA_TraCE21k_dem_-200_V1.0.tif")

  1. rawnames1.append("./haddaway/CHELSA_TraCE21k_glz_-200_V1.0.tif")

rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio12_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio01_-200_V1.0.tif")

  1. rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio19_-200_V1.0.tif")
  2. rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio11_-200_V1.0.tif")


isostuff1=0

if(isostuff1==1): rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio01_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio02_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio03_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio04_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio05_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio06_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio07_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio08_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio09_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio10_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio11_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio12_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio13_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio14_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio15_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio16_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio17_-200_V1.0.tif") #rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio18_-200_V1.0.tif") rawnames1.append("./haddaway2/CHELSA_TraCE21k_bio19_-200_V1.0.tif")


bigstak, midistak,bigla, biglo, midila, midilo= process_big_rasters(rawnames1,lon1, lat1, lon2, lat2, width1, height1)

mloz1,mlaz1 = np.meshgrid(midilo, midila) bloz1,blaz1 = np.meshgrid(biglo, bigla)

smallz1,sla1,slo1=load_raster_float32(smallname) sloz1, slaz1 = np.meshgrid(slo1, sla1)

x_save_raster("./work/small_lon.nc", "NetCDF", sloz1, slo1, sla1) x_save_raster("./work/small_lon.tif", "GTiff", sloz1, slo1, sla1) x_save_raster("./work/small_lat.nc", "NetCDF", slaz1, slo1, sla1) x_save_raster("./work/small_lat.tif", "GTiff", slaz1, slo1, sla1)

x_save_raster("./work/midi_lon.nc", "NetCDF", mloz1, midilo, midila) x_save_raster("./work/midi_lon.tif", "GTiff", mloz1, midilo, midila) x_save_raster("./work/midi_lat.nc", "NetCDF", mlaz1, midilo, midila) x_save_raster("./work/midi_lat.tif", "GTiff", mlaz1, midilo, midila)

x_save_raster("./work/big_lon.nc", "NetCDF", bloz1, biglo, bigla) x_save_raster("./work/big_lon.tif", "GTiff", bloz1, biglo, bigla) x_save_raster("./work/big_lat.nc", "NetCDF", blaz1, biglo, bigla) x_save_raster("./work/big_lat.tif", "GTiff", blaz1, biglo, bigla)


load_geo_rasters=0

if(load_geo_rasters==1): az1,ay1,az1=load_raster_float32("./work/big_dem.tif") az2=np.ravel(az1) bigstak.append(az2) az1,ay1,az1=load_raster_float32("./work/midi_dem.tif") az2=np.ravel(az1) bigstak.append(az2)

az1,ay1,az1=load_raster_float32("./work/big_landsea.tif") az2=np.ravel(az1) bigstak.append(az2) az1,ay1,az1=load_raster_float32("./work/midi_landsea.tif") az2=np.ravel(az1) bigstak.append(az2)

az1,ay1,az1=load_raster_float32("./work/big_distcoast.tif") az2=np.ravel(az1) bigstak.append(az2) az1,ay1,az1=load_raster_float32("./work/midi_distcoast.tif") az2=np.ravel(az1) bigstak.append(az2)

az1,ay1,az1=load_raster_float32("./work/big_lon.tif") az2=np.ravel(az1) bigstak.append(az2) az1,ay1,az1=load_raster_float32("./work/midi_lon.tif") az2=np.ravel(az1) bigstak.append(az2)

az1,ay1,az1=load_raster_float32("./work/big_lat.tif") az2=np.ravel(az1) bigstak.append(az2) az1,ay1,az1=load_raster_float32("./work/midi_lat.tif") az2=np.ravel(az1) bigstak.append(az2)



smash1, meduza=filter_nas_out_from_small_and_midi(smallz1, midistak)


  1. plt.imshow(smash1)
  2. plt.show()


  1. quit(-1)


small_model=midistak[0] big_model=bigstak[0]

sheipi1=np.shape(big_model) cols3=sheipi1[1] rows3=sheipi1[0]


num1=len(midistak)

bigs=[] smalls=[]

    1. WARNING data filtering!
    2. clamp NaN out!
  1. print(num1)

replaceval1=5.0


for n in range(0,num1): print("QR") print(n) #m1=midistak[n] #b1=bigstak[n] m1=meduza[n] b1=bigstak[n] m1=m1.flatten() b1=b1.flatten() ## jn warning test #b1=b1.replace(np.inf, np.nan) #m1=m1.replace(np.inf, np.nan) b1[np.isinf(b1)]=replaceval1 m1[np.isinf(m1)]=replaceval1 b1[np.isnan(b1)]=replaceval1 m1[np.isnan(m1)]=replaceval1 m1[m1==9.96920997e+36]=replaceval1 b1[b1==9.96920997e+36]=replaceval1 m1[m1==-32768]=replaceval1 b1[b1==-32768]=replaceval1 smalls.append(m1) bigs.append(b1)


spek1=bigs[0] spek2=np.reshape(spek1,sheipi1)

  1. plt.imshow(spek2)

smallz1[smallz1==9.96920997e+36] = replaceval1 smash1[smash1==9.96920997e+36] = replaceval1 smallz1[ np.isinf(smallz1) ] = replaceval1 smash1[ np.isinf(smash1) ] = replaceval1 smallz1[ np.isnan(smallz1) ] = replaceval1 smash1[ np.isnan(smash1) ] = replaceval1

    1. jn warning input data upper loimit

smallz1[smallz1>replaceval1 ] = replaceval1 smash1[ smash1>replaceval1 ] = replaceval1


  1. print(smash1)


  1. quit(-1)


  1. plt.imshow(smallz1)
  1. plt.show()
  1. quit(-1)

apoints1=list(zip(*smalls))

  1. bpoints1=smallz1

bpoints1=smash1 cpoints1=list(zip(*bigs))

  1. y2=linear_regression_singe_var(r2, r1, r4)

y2=random_forest_multiple_vars(apoints1, bpoints1, cpoints1)

  1. y2=gam_multiple_vars(apoints1, bpoints1, cpoints1)
  2. y2=linear_regression_multiple_vars(apoints1, bpoints1, cpoints1) ## nok


y2_100=y2*100

ro1=np.reshape(y2_100,sheipi1)

  1. plt.imshow(ro1)
  2. plt.show()

snowdepth_est=ro1*7


save_raster_compress("output.nc","NetCDF", ro1, lon1,lat1,lon2,lat2,cols3, rows3) gaussian_blur("output.nc", "gauss.nc", 10)

save_raster_compress("output_sndest.nc","NetCDF", snowdepth_est, lon1,lat1,lon2,lat2,cols3, rows3) gaussian_blur("output.nc", "gauss_sndest.nc", 10)


  1. plt.imshow(ro1)
  2. plt.show()

print(".")



Landmask

    1. create Panoply landmask from Chelsa climate dem file
    2. version 0001 11.5.2021

library(raster) library(sp) library(rgdal) library(magick)

rin1<-raster("./haddaway/CHELSA_TraCE21k_dem_-200_V1.0.tif")

    1. correct extent of raster

bb1 <- extent(-180.0, 180.0, -90.0, 90.0)

rout0 <- extend(rin1, bb1)

    1. create smaller raster for use w/panoply
  1. s <- raster(nrow = 1000, ncol = 2000)
  2. s <- raster(nrow = 9000, ncol = 18000)

s <- raster(nrow = 3600, ncol = 7200)

extent(s) <- extent(rout0) rout1 <- resample(rout0, s, method = 'bilinear') rf <- writeRaster(rout1, filename="./dem2.tif", format="GTiff", overwrite=TRUE)

rin2<-raster("./dem2.tif")

rin2[is.na(rin2)] <- 0

rin2[rin2<0]=0 rin2[rin2>0]=1

print(rin2)

rout2<-rin2

rf <- writeRaster(rout2, filename="./mask0.tif", format="GTiff", overwrite=TRUE)

system("gdal_translate -of Gif -ot Byte mask0.tif lama0.gif")

tiger2 <- image_read('lama0.gif')

tiger3=image_negate(tiger2)

image_write(tiger3, path = "landmask_2.gif", format = "gif")

Icemask

    1. convert "R" Chelsa Trace gle ice masf .tif to Panoply mask .gif
    2. 12.5.2021 v 0000

library(raster) library(sp) library(rgdal) library(magick)

rin1<-raster("./haddaway/CHELSA_TraCE21k_gle_-200_V1.0.tif")

    1. correct extent of raster

bb1 <- extent(-180.0, 180.0, -90.0, 90.0)

rout0 <- extend(rin1, bb1)

    1. create smaller raster for use w/panoply
  1. s <- raster(nrow = 1000, ncol = 2000)
  2. s <- raster(nrow = 1800, ncol = 3600)

s <- raster(nrow = 3600, ncol = 7200)

  1. s <- raster(nrow = 9000, ncol = 18000)

extent(s) <- extent(rout0) rout1 <- resample(rout0, s, method = 'bilinear') rf <- writeRaster(rout1, filename="./ice2.tif", format="GTiff", overwrite=TRUE)

  1. rf <- writeRaster(rout1, filename="./ice2.nc", format="NetCDF", overwrite=TRUE)
  1. rout2<-raster("ice2.tif")
  2. writeRaster(rout2, "ice2.nc", options=c("COMPRESS=DEFLATE", "FORMAT=NC4"), overwrite=TRUE)

system("gdal_translate -of Gif -ot Byte ice2.tif ice0.gif")

tiger2 <- image_read('ice0.gif')

tiger3=image_negate(tiger2)

image_write(tiger3, path = "icemask_2.gif", format = "gif")

Licensing[edit]

I, the copyright holder of this work, hereby publish it under the following license:
w:en:Creative Commons
attribution share alike
This file is licensed under the Creative Commons Attribution-Share Alike 4.0 International license.
You are free:
  • to share – to copy, distribute and transmit the work
  • to remix – to adapt the work
Under the following conditions:
  • attribution – You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
  • share alike – If you remix, transform, or build upon the material, you must distribute your contributions under the same or compatible license as the original.

File history

Click on a date/time to view the file as it appeared at that time.

Date/TimeThumbnailDimensionsUserComment
current07:01, 14 May 2021Thumbnail for version as of 07:01, 14 May 20211,472 × 976 (574 KB)Merikanto (talk | contribs)Data and layout
16:12, 12 May 2021Thumbnail for version as of 16:12, 12 May 20211,472 × 976 (499 KB)Merikanto (talk | contribs)Uploaded own work with UploadWizard

The following page uses this file: