File:France precipitation 22000 bp 1.png

From Wikimedia Commons, the free media repository

Original file(1,616 × 1,072 pixels, file size: 1.59 MB, MIME type: image/png)

Captions

Precipitation of France, 22000 years ago

Summary

Description
English: Precipitation over France, 22,000 years ago. TraCE-21ka dataset, downscaled with random-forest (RF) and GAM regression.
Date
Source: Own work
Author: Merikanto
Camera location: 45° 00′ 00″ N, 1° 00′ 00″ E; heading: 1°

This image is based on the TraCE-21ka dataset from the Earth System Grid.

TraCE-21ka: Simulation of Transient Climate Evolution over the last 21,000 years

https://www.cgd.ucar.edu/ccr/TraCE/
https://www.earthsystemgrid.org/project/trace.html
https://www.earthsystemgrid.org/dataset/ucar.cgd.ccsm.trace.html

Otto-Bliesner, B., Liu, Z., He, F., Brady, E., Clark, P., Carlson, A., Tomas, R., Erickson, D. & Jacob, R. (2008). Transient Simulation of Climate Evolution over the Last 21,000 Years (TraCE-21,000): First Results. AGU Fall Meeting.

WindNinja program: https://www.firelab.org/project/windninja


A simple but two-stage downscaling approach was selected.

The data are downscaled with a wind raster produced by WindNinja together with other predictor rasters: DEM, longitude, latitude, distance to sea, aspect and slope.

The code is in R and Python; GDAL is required.
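In outline, each stage fits a regression from the coarse-grid predictor rasters (DEM, longitude, latitude, distance to sea, aspect, slope, wind) to the coarse rainfall, then evaluates the fitted model on the same predictors at fine resolution. A minimal sketch of that idea with scikit-learn, assuming the grids have already been flattened to point arrays (the names here are illustrative; the full scripts below build these arrays from NetCDF rasters):

import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.preprocessing import StandardScaler

def downscale_sketch(X_coarse, rain_coarse, X_fine, n_estimators=50, max_features=6):
    # X_coarse: (n_cells, n_predictors) predictor values on the coarse grid
    # rain_coarse: (n_cells,) coarse rainfall target
    # X_fine: the same predictor variables sampled on the fine grid
    sc = StandardScaler()
    model = RandomForestRegressor(n_estimators=n_estimators, max_features=max_features)
    model.fit(sc.fit_transform(X_coarse), rain_coarse)
    return model.predict(sc.transform(X_fine))  # rainfall estimate on the fine grid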

Code to extract small TraCE-21ka rasters.

# trace21ka wind and rain data catch, from one slice
# 9.11.2021 0000.0001

library(raster)
library(ncdf4)
library(rgdal)
library(REdaS)
library(RPMG)

# data source: trace21ka decadal average data, PRECC and PRECL

file1="./tracedata/trace.01-36.22000BP.cam2.U.22000BP_decavg_400BCE.nc"
file2="./tracedata/trace.01-36.22000BP.cam2.V.22000BP_decavg_400BCE.nc"
file3="./tracedata/trace.01-36.22000BP.cam2.PRECC.22000BP_decavg_400BCE.nc"
file4="./tracedata/trace.01-36.22000BP.cam2.PRECL.22000BP_decavg_400BCE.nc"

startime1=1  # 1 = January, 7 = July

# capture rain

count1=1

nc3=nc_open(file3)

# PRECC

rdata3 <- ncvar_get( nc3, "PRECC", start=c(1,1,startime1), count=c(96,48,count1) )

lons1 <- ncvar_get(nc3, "lon")
lats1 <- ncvar_get(nc3, "lat")

nc_close(nc3)

nc4=nc_open(file4)

# PRECL

rdata4 <- ncvar_get( nc4, "PRECL", start=c(1,1,startime1), count=c(96,48,count1) )

nc_close(nc4)

# ext1 <- c(minlon1, maxlon1, minlat1, maxlat1)

ext1<-c(0, 360, -90,90)

# head(data1)

rd2<-t(rdata3)
rd3<-apply(rd2,2,rev)
rr0 <- raster(rd3)
extent(rr0) <- ext1
rr1<-rotate(rr0)

rda2<-t(rdata4)
rda3<-apply(rda2,2,rev)
rra0 <- raster(rda3)
extent(rra0) <- ext1
rr2<-rotate(rra0)

rainraster1<-rr1+rr2

# convert rain rate to mm per year (input unit is m s-1)
# yearly rain, not monthly!

rainraster1<-rainraster1*60*60*24*365*1000
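# note (assumption based on the CCSM/TraCE-21ka conventions): PRECC and PRECL
# are rain rates in m s-1, so the factor 60*60*24*365*1000 ~ 3.154e10 converts
# to mm per year; e.g. a rate of 3e-8 m s-1 becomes about 946 mm per year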

image(rainraster1)

crs(rainraster1)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

writeRaster(rainraster1, "rain.nc", overwrite=TRUE, format="CDF", varname="Rain",
            varunit="", longname="Rain", xname="lon", yname="lat")
       


# capture annual wind

count1=1

startime1=1  # 1 = January, 7 = July

# layer1: 26 = near surface, 1 = top!
layer1=26

nc1=nc_open(file1)

# U

udata1 <- ncvar_get( nc1, "U", start=c(1,1,layer1,startime1), count=c(96,48,1,1) )

lons1 <- ncvar_get(nc1, "lon")
lats1 <- ncvar_get(nc1, "lat")
levs1 <- ncvar_get(nc1, "lev")
nc_close(nc1)

nc2=nc_open(file2)

# V

vdata1 <- ncvar_get( nc2, "V", start=c(1,1,layer1,startime1), count=c(96,48,1,1) )

nc_close(nc2)

ext1<-c(0, 360, -90,90)

str(ext1)

# head(data1)
# str(data1)

dim(udata1)

# head(data1)

udata2<-t(udata1)
udata3<-apply(udata2,2,rev)
ur0 <- raster(udata3)
extent(ur0) <- ext1
ur1<-rotate(ur0)

vdata2<-t(vdata1)
vdata3<-apply(vdata2,2,rev)
vr0 <- raster(vdata3)
extent(vr0) <- ext1
vr1<-rotate(vr0)

speedr1<-sqrt(ur1*ur1+vr1*vr1)

image(speedr1)

# dirr1 <- rad2deg(atan2(vr1, ur1))

PII=3.14159265358979323846

# dirr1 = atan2(-vr1, -ur1)/PII*180 + 180

dirr1 = fmod(atan2(vr1,ur1)/PII*180, 360)  # fmod() from RPMG

image(dirr1)

plot(dirr1)
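# note: atan2(vr1, ur1) gives the mathematical angle the wind blows toward
# (counter-clockwise from east), which is what this script stores; the
# meteorological convention (direction the wind comes FROM, clockwise from
# north) would instead be fmod(270 - atan2(vr1, ur1)/PII*180, 360)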

crs(ur1)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

writeRaster(ur1, "u.nc", overwrite=TRUE, format="CDF", varname="U", varunit="ms-1",
            longname="U", xname="lon", yname="lat")
       

crs(vr1)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

writeRaster(vr1, "v.nc", overwrite=TRUE, format="CDF", varname="V", varunit="ms-1",
            longname="V", xname="lon", yname="lat")
       
       

crs(dirr1)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

writeRaster(dirr1, "windir.nc", overwrite=TRUE, format="CDF", varname="windir", varunit="deg",
            longname="windir", xname="lon", yname="lat")
       

crs(speedr1)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

writeRaster(speedr1, "windspeed.nc", overwrite=TRUE, format="CDF", varname="windspeed", varunit="ms-1",
            longname="windspeed", xname="lon", yname="lat")



Code to prepare a DEM for WindNinja (NOTE: not used here; WindNinja's own raster was used instead).

    1. "R" generate france UTM dem for windninja drom input
    2. map, set reso to 200 m

library(raster)
library(rgdal)

# MERIT DEM, 250 m, 2017

infilename1<-"E:/varasto_dem/merit_dem_250.tif"

# infilename1<-"E:/varasto_dem/CHELSA_TraCE21k_dem_-200_V1.0.tif"

outfilename1="france_dem_in.tif"

# france

ext1 <- extent(-5,12,40,52)

# ext1 <- extent(-5,12,38,52)
# ext1 <- extent(-12,16,36,54)  ## not OK

inras1<-raster(infilename1)

r<-crop(inras1,ext1)

r[is.na(r[])] <- 0

crs(r) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
writeRaster(r, filename="france.tif", overwrite=TRUE, format="GTiff", xname="lon", yname="lat")

r2<-raster("france.tif")

# jn warning: check this

sr <- "+proj=utm +zone=31 +ellps=GRS80 +datum=WGS84 +units=m +no_defs"

projected_raster <- projectRaster(r2, crs = sr)

r3<-projected_raster

r3[is.na(r3[])] <- 0

ss <- raster(resolution=c(200,200), crs=proj4string(r3), ext=extent(r3))

rs <- resample(r3, ss)

res(rs)

writeRaster(rs, filename=outfilename1, overwrite=TRUE, format="GTiff", xname="lon", yname="lat")

Code to post-process WindNinja output.
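The R script below reads WindNinja's legacy ASCII VTK output: it takes the grid size from the DIMENSIONS header line, skips past the POINTS block, and reads the remaining rows as (U, V, W) vectors. A minimal Python sketch of the same parse, assuming that file layout (one DIMENSIONS line, nx·ny·nz point rows, then one vector row per point; the function name is illustrative):

import numpy as np

def read_windninja_vtk(path):
    with open(path) as f:
        lines = f.read().splitlines()
    # grid size from the "DIMENSIONS nx ny nz" header line
    dims = next(l for l in lines if l.startswith("DIMENSIONS")).split()
    nx, ny, nz = int(dims[1]), int(dims[2]), int(dims[3])
    npoints = nx * ny * nz
    # vector data starts after the header and the POINTS block
    start = npoints + 11
    data = np.loadtxt(path, skiprows=start, max_rows=npoints)
    u, v, w = data[:, 0], data[:, 1], data[:, 2]
    return (nx, ny, nz), u, v, w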


# ASCII VTK to NetCDF wind data, "R" script
# reads the output VTK from WindNinja
# and the input raster file
# 11.11.2021 0000.0007


library(raster)
library(rgdal)
library(ncdf4)
library(easyNCDF)
library(stringr)
library(REdaS)
library(RPMG)


askname1<-"france_dem_in.tif"
inf1<-"france_dem_in_310_20_8162m.vtk"


rask1<-raster(askname1)
str(rask1)


crs0<-crs(rask1)
ext0<-extent(rask1)


post_ext<-c(-4.5,10.0,42.0,50.5 )

# post_ext<-c(-4.0,6.0,40.0,50.0)


# ext1<-extent()


fbuf1<-readLines(inf1)


# head(fbuf1)

info1<-fbuf1[6]  # line 6 holds "DIMENSIONS nx ny nz"

info1

infos01<-strsplit(info1, split = " ")

infos1<-as.vector(infos01[[1]])

infos1

a1=as.integer(infos1[2])
a2=as.integer(infos1[3])
a3=as.integer(infos1[4])

b1=a1*a2*a3

len1<-a1*a2


b1

lok1= b1+11  # skip the VTK header and the POINTS block


linee1<-fbuf1[lok1]


print(linee1)


dfin1 = read.csv(inf1, skip = lok1,sep=" ", header = F)


names(dfin1)<-c("U","V","W")


head(dfin1)


z1 <- as.vector(dfin1[[1]])
z2 <- as.vector(dfin1[[2]])
z3 <- as.vector(dfin1[[3]])

# z0<-dfin1[3]
# head(z1)

u1<-array(z1, dim=c(a1,a2,a3))
v1<-array(z2, dim=c(a1,a2,a3))
w1<-array(z3, dim=c(a1,a2,a3))


# head(z2)

u<-u1[,,1]
v<-v1[,,1]
w<-w1[,,1]

# head(z3)
# image(w)
# plot(z3)


rw1 <- raster(nrow=a1, ncol=a2)
rw1 <- setValues(rw1, w)
rw1 <- t(rw1)
rw1 <- flip(rw1, direction="y")

ru1 <- raster(nrow=a1, ncol=a2)
ru1 <- setValues(ru1, u)
ru1 <- t(ru1)
ru1 <- flip(ru1, direction="y")

rv1 <- raster(nrow=a1, ncol=a2)
rv1 <- setValues(rv1, v)
rv1 <- t(rv1)
rv1 <- flip(rv1, direction="y")


wind_vel_1<-sqrt(ru1*ru1+rv1*rv1)

# image(velr1)


PII=3.14159265358979323846

# dirr1 = atan2(-vr1,-ur1)/PII*180 + 180

wind_dir_1 = fmod(atan2(rv1,ru1)/PII*180,360)


# crs(rw1) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

crs(rw1)<-crs0
extent(rw1)<-ext0
crs(ru1)<-crs0
extent(ru1)<-ext0
crs(rv1)<-crs0
extent(rv1)<-ext0

crs(wind_dir_1)<-crs0
extent(wind_dir_1)<-ext0

crs(wind_vel_1)<-crs0
extent(wind_vel_1)<-ext0


writeRaster(rw1, "accu_vel_w_orig.nc", overwrite=TRUE, format="CDF", varname="w", varunit="ms-1",
            longname="w", xname="long", yname="lat")


crsout1<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

rw2 <- projectRaster(rw1, crs = crs(crsout1))
ru2 <- projectRaster(ru1, crs = crs(crsout1))
rv2 <- projectRaster(rv1, crs = crs(crsout1))
wind_vel_2 <- projectRaster(wind_vel_1, crs = crs(crsout1))
wind_dir_2 <- projectRaster(wind_dir_1, crs = crs(crsout1))


crs(rw2) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

rw2<-crop(rw2,post_ext)


writeRaster(rw2, "accu_vel_w.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
 

crs(wind_dir_2) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

wind_dir_2<-crop(wind_dir_2,post_ext)

writeRaster(wind_dir_2, "accu_dir.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")


sablunaras1<-raster(ncol=800, nrow=800)

extent(sablunaras1)<-ext0
crs(sablunaras1)<-crs0

demout0<-resample(rask1, sablunaras1)


print ("Small DEM ...")


demout1<- projectRaster(demout0, crs = crs(crsout1))


crs(demout1) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

demout1<-crop(demout1,post_ext)

writeRaster(demout1, "saccu_dem_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
       
       

print("Distance, slope and aspect in python ...")

system("python richdem1.py")


# big raster

rin1 <- raster("saccu_dem_1.nc")

# small raster

rin2 <- raster("accu_vel_w.nc")

lonr1 <- init(rin1, 'x')
latr1 <- init(rin1, 'y')
lonr2 <- init(rin2, 'x')
latr2 <- init(rin2, 'y')

dem2<-resample(rin1, rin2)


crs(lonr1) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(latr1) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(lonr2) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(latr2) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(dem2) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"


writeRaster(lonr1, "saccu_lon_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
       

writeRaster(latr1, "saccu_lat_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
              

writeRaster(lonr2, "accu_lon_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
       

writeRaster(latr2, "accu_lat_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")
              

writeRaster(dem2, "accu_dem_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")


rin3 <- raster("saccu_aspect_1.tif")
rin4 <- raster("saccu_slope_1.tif")
rin5 <- raster("saccu_distance_1.tif")

rout3<-resample(rin3, rin2)
rout4<-resample(rin4, rin2)
rout5<-resample(rin5, rin2)

crs(rin3)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(rin4)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(rin5)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

crs(rout3)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(rout4)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
crs(rout5)<-"+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"


writeRaster(rin3, "saccu_aspect_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")

writeRaster(rin4, "saccu_slope_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")

writeRaster(rin5, "saccu_distance_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")


writeRaster(rout3, "accu_aspect_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")

writeRaster(rout4, "accu_slope_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")

writeRaster(rout5, "accu_distance_1.nc", overwrite=TRUE, format="CDF", varname="Band1", varunit="",
            longname="Band1", xname="lon", yname="lat")


print(".")








"richdem1.py", needed by above script

# python richdem: calculate slope and aspect of the DEM
# 11.11.2021 0000.000

import os
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import elevation
import richdem as rd


matplotlib.rcParams['figure.figsize'] = (8, 5.5)

# dem_path = os.path.join(os.getcwd(), 'Shasta-30m-DEM.tif')
# elevation.clip(bounds=(-122.4, 41.2, -122.1, 41.5), output=dem_path)


os.system("gdal_translate -of GTiff saccu_dem_1.nc saccu_dem_1.tif") os.system("gdal_proximity.py -srcband 1 -distunits GEO -values 0 -maxdist 50000 -nodata 65535 -ot UInt16 -of GTiff saccu_dem_1.tif saccu_distance_1.tif") os.system("gdal_translate -of NetCDF saccu_distance_1.tif saccu_distance_1.nc")

dem_path_1="saccu_dem_1.nc"

dem1 = rd.LoadGDAL(dem_path_1)

slope = rd.TerrainAttribute(dem1, attrib='slope_riserun')
aspect = rd.TerrainAttribute(dem1, attrib='aspect')

# distance = rd.TerrainAttribute(dem1, attrib='proximity')
# rd.rdShow(aspect, axes=False, cmap='jet', figsize=(8, 5.5))
# plt.show()


rd.SaveGDAL("saccu_aspect_1.tif", aspect)
rd.SaveGDAL("saccu_slope_1.tif", slope)

# rd.SaveGDAL("saccu_distance_1.tif", distance)
# print("Jee")


Code for the rainfall downscaling attempt.

# netcdf downscaler
# with regression test
# also habitat test
# python 3, GDAL
# v 0012.0003
# 11.11.2021

import numpy as np
import scipy
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from matplotlib import cm
from colorspacious import cspace_converter
from collections import OrderedDict

# import colormaps as cmaps
# import cmaps

import netCDF4 as nc
import os
from scipy.interpolate import Rbf

from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
from sklearn.ensemble import RandomForestRegressor
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn import svm, metrics
from pygam import LogisticGAM
from pygam import LinearGAM

import array as arr
import scipy.stats

# if basemap is available, we'll use it;
# otherwise, we'll improvise later...

try:
    from mpl_toolkits.basemap import Basemap
    basemap = True
except ImportError:
    basemap = False


# control vars
# random forest

RF_estimators1=10
RF_features1=2

# downscaler

DS_method=0     # default: random forest
DS_show=1       # view downscaled output
DS_input_log=0  # convert "b" var to log during downscaling

cache_lons1=[]
cache_lats1=[]
cache_x=[]
cache_y=[]
cache_z=[]
cache_x2=[]
cache_y2=[]
cache_z2=[]


def probability_single_var(x1, y1, x2):
    print("Species habitation test")

    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)
    sha1 = np.shape(x1)
    dim2 = 1
    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))

    # y = y*0.0 + 1
    y = x

    # print(x)
    # print(y)

    x_mean = np.mean(x)
    x_std = np.std(x)
    x_cover_std = (x - x_mean) / x_std

    y_mean = np.mean(y)
    y_std = np.std(y)
    y_cover_std = (y - y_mean) / y_std

    kohde = abs(x - x_mean) / x_std
    # print(kohde)
    # print(x - x_mean)
    # quit()

    # plt.plot(x_cover_std)
    # plt.show()

    # model = LinearRegression().fit(x, kohde)
    # degree = 3
    # polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    # model = polyreg.fit(x, kohde)
    sc = StandardScaler()
    xa = sc.fit_transform(x)
    xba = sc.transform(xb)

    # model = RandomForestRegressor(n_estimators=10, max_features=2).fit(xa, kohde)
    # model = LogisticGAM().fit(x, kohde)
    model = LinearGAM().fit(x, kohde)

    y2 = model.predict(xb)

    # svm1 = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.5)
    # model = svm1.fit(x, y)
    # model = svm1.fit(x, kohde)
    # y2 = model.predict(xb)

    y2_mean = np.mean(y2)
    y2_std = np.std(y2)
    y2_cover_std = (y2 - y2_mean) / y2_std

    y3 = y2_cover_std

    # y5 = scipy.stats.norm.cdf(y3, y2_mean, y2_std)
    y4 = scipy.stats.norm.sf(y3, y2_mean, y2_std)

    ymin = np.min(y4)
    deltamax = np.max(y4) - np.min(y4)

    y5 = (y4 - ymin) / deltamax
    # zgrid1 = np.array(zoutvalue1).reshape(1000, 400)
    # plt.plot(y5)
    # cb = plt.scatter(np.arange(1,1000), np.arange(1,400), s=60, c=zoutvalue1)
    # plt.show()

    # print(np.shape(y5[0]))
    # stop(-1)

    return y5



def load_xy(infname1, lonname1, latname1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    indf1 = pd.read_csv(infname1, sep=';')
    # print(indf1)

    templons1 = indf1[lonname1]
    templats1 = indf1[latname1]

    cache_lons1 = np.array(templons1)
    cache_lats1 = np.array(templats1)

    # print(cache_lons1)
    # print(cache_lats1)

    return 0


def preprocess_big_raster(infilename1, invarname1, sabluname1, outfilename1, soutfilename1,
                          outvarname1, lon1, lon2, lat1, lat2, width1, height1, roto):
    gdal_cut_resize_fromnc_tonc(infilename1, sabluname1, outfilename1, invarname1, lon1, lon2, lat1, lat2)
    gdal_cut_resize_tonc(infilename1, soutfilename1, lon1, lon2, lat1, lat2, width1, height1)
    return 0

def preprocess_small_single_raster(infilename1, invarname1, outfilename1, outvarname1,
                                   lon1, lon2, lat1, lat2, roto):
    print(infilename1)
    tempfilename1 = "./temp00.nc"
    tempfilename2 = "./temp01.nc"
    loadnetcdf_single_tofile(infilename1, invarname1, tempfilename1, outvarname1)
    # rotate_netcdf_360_to_180(tempfilename1, outvarname1, tempfilename2, outvarname1)
    gdal_cut_tonc(tempfilename1, outfilename1, lon1, lon2, lat1, lat2)
    return 0

def preprocess_small_timed_raster(infilename1, invarname1, intime1, outfilename1, outvarname1,
                                  lon1, lon2, lat1, lat2, roto):
    tempfilename1 = "./temp00.nc"
    tempfilename2 = "./temp01.nc"
    loadnetcdf_timed_tofile(infilename1, invarname1, intime1, tempfilename1, outvarname1)
    rotate_netcdf_360_to_180(tempfilename1, outvarname1, tempfilename2, outvarname1)
    gdal_cut_tonc(tempfilename2, outfilename1, lon1, lon2, lat1, lat2)
    return 0


def rotate_netcdf_360_to_180(infilename1, invarname1, outfilename1, outvarname1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    pointsx1 = []
    pointsy1 = []
    gdal_get_nc_to_xyz_in_mem(infilename1, invarname1)
    lonsa = cache_lons1
    latsa = cache_lats1
    pointsz1 = np.array(cache_z)
    pointsx1 = np.array(cache_x)
    pointsy1 = np.array(cache_y)
    nlatsa1 = len(latsa)
    nlonsa1 = len(lonsa)
    pointsz3 = np.reshape(pointsz1, (nlatsa1, nlonsa1))
    rama1 = int(len(lonsa) / 2)
    pointsz4 = np.roll(pointsz3, rama1, axis=1)  # shift grid by 180 degrees
    lonsb = lonsa - 180
    save_points_to_netcdf(outfilename1, outvarname1, lonsb, latsa, pointsz4)
    return 0


def gdal_get_nc_to_xyz_in_mem(inname1, invar1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    imga = loadnetcdf_single_tomem(inname1, invar1)
    # plt.imshow(imga)
    lonsa = cache_lons1
    latsa = cache_lats1

    cache_lons1 = []
    cache_lats1 = []
    cache_x = []
    cache_y = []
    cache_z = []
    cache_x2 = []
    cache_y2 = []
    cache_z2 = []

    dim1 = imga.shape
    nlat1 = len(latsa)
    nlon1 = len(lonsa)

    # plt.plot(imga)
    # plt.show()
    # print(nlat1)
    # print(nlon1)

    for iy in range(0, nlat1):
        for ix in range(0, nlon1):
            pz1 = imga[iy, ix]
            if str(pz1) == '--':  # masked value
                cache_x.append(lonsa[ix])
                cache_y.append(latsa[iy])
                cache_z.append(0)
            else:
                cache_x.append(lonsa[ix])
                cache_y.append(latsa[iy])
                cache_z.append(pz1)

    cache_lons1 = lonsa
    cache_lats1 = latsa

    return cache_z



def average_tables(daata1, daata2):
    daata3 = daata1
    pitu = len(daata1)
    for n in range(0, pitu):
        daata3[n] = (daata1[n] + daata2[n]) / 2
    return daata3


def add_scalar(daata, skalar):
    pitu = len(daata)
    for n in range(0, pitu):
        daata[n] = daata[n] + skalar
    return daata


def gam_multiple_vars(x1, y1, x2):
    print("GAM")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)

    # print(xa1)
    # quit(-1)

    sha1 = np.shape(x1)
    dim2 = sha1[1]

    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))

    # sc = StandardScaler()
    # xa = sc.fit_transform(x)
    # xba = sc.transform(xb)

    # model = LogisticGAM().fit(x, y)
    model = LinearGAM().fit(x, y)

    y2 = model.predict(xb)

    return y2


def random_forest_multiple_vars(x1, y1, x2):
    print("RF")

    global RF_estimators1
    global RF_features1

    print(RF_estimators1, RF_features1)

    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)

    sha1 = np.shape(x1)
    dim2 = sha1[1]

    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))

    # model = LinearRegression().fit(x, y)
    # degree = 3
    # polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    # model = polyreg.fit(x, y)

    sc = StandardScaler()
    xa = sc.fit_transform(x)
    xba = sc.transform(xb)

    # orig: model = RandomForestRegressor(n_estimators=10, max_features=2).fit(xa, y)
    model = RandomForestRegressor(n_estimators=RF_estimators1, max_features=RF_features1).fit(xa, y)

    y2 = model.predict(xba)
    return y2


def save_points_toxyz(filename, x, y, z):
    print("Dummy function only")
    return 0

def linear_regression_multiple_vars(x1, y1, x2):
    print("MLR")
    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)

    sha1 = np.shape(x1)
    dim2 = sha1[1]

    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))

    # model = LinearRegression().fit(x, y)
    degree = 3
    polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    model = polyreg.fit(x, y)

    y2 = model.predict(xb)
    return y2


def linear_regression_singe_var(x1, y1, x2):
    # print(x1)
    # print(y1)
    # return 0

    xa1 = np.array(x1)
    ya1 = np.array(y1)
    xa2 = np.array(x2)

    sha1 = np.shape(x1)
    dim2 = sha1[1]

    x = xa1.reshape((-1, dim2))
    y = ya1.reshape((-1, 1))
    xb = xa2.reshape((-1, dim2))
    # x = xa1.reshape((-1, 1))
    # y = ya1.reshape((-1, 1))
    # xb = xa2.reshape((-1, 1))

    # print(x)
    # print(y)

    model = LinearRegression().fit(x, y)
    # model = RandomForestRegressor(n_estimators=10, max_features=2).fit(x, y)
    # degree = 2
    # polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression())
    # model = polyreg.fit(x, y)

    # warning: not xb
    y2 = model.predict(xb)
    # print(y2)
    # print("LR")
    return y2


def save_points_to_netcdf(outfilename1, outvarname1, xoutlons1, xoutlats1, zoutvalue1):
    nlat1 = len(xoutlats1)
    nlon1 = len(xoutlons1)
    # print("Save ...")
    zoutvalue2 = np.array(zoutvalue1).reshape(nlat1, nlon1)
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    outlat1 = ncout1.createDimension('lat', nlat1)
    outlon1 = ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = xoutlats1
    outlons1[:] = xoutlons1
    outvalue1[:, :] = zoutvalue2[:]
    ncout1.close()
    return 0


def gdal_cut_resize_fromnc_tonc(inname1, inname2, outname2, invar2, lon1, lon2, lat1, lat2):
    imga = loadnetcdf_single_tomem(inname2, invar2)
    dim1 = imga.shape
    height1 = dim1[0]
    width1 = dim1[1]
    print(width1)
    print(height1)
    jono1 = ("gdalwarp -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) + " " + str(lat2) +
             " -ts " + str(width1) + " " + str(height1) + " " + inname1 + " " + outname2)
    print(jono1)
    os.system(jono1)
    return



def gdal_get_points_from_file(inname1, invar1, pointsx1, pointsy1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    imga = loadnetcdf_single_tomem(inname1, invar1)
    # plt.imshow(imga)
    lonsa = cache_lons1
    latsa = cache_lats1

    cache_lons1 = []
    cache_lats1 = []
    cache_x = []
    cache_y = []
    cache_z = []
    cache_x2 = []
    cache_y2 = []
    cache_z2 = []

    dim1 = imga.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]

    pitu = len(pointsx1)
    for n in range(0, pitu):
        px1 = pointsx1[n]
        py1 = pointsy1[n]
        for iy in range(0, nlat1):
            if py1 >= latsa[iy]:
                for ix in range(0, nlon1):
                    if px1 >= lonsa[ix]:
                        pz1 = imga[iy, ix]
                        cache_x.append(lonsa[ix])
                        cache_y.append(latsa[iy])
                        cache_z.append(pz1)

    cache_lons1 = lonsa
    cache_lats1 = latsa

    return cache_z


def gdal_cut_resize_tonc(inname1, outname1, lon1, lon2, lat1, lat2, width1, height1):
    # gdalwarp -te -5 41 10 51 -ts 1000 0 input.tif output.tif
    jono1 = ("gdalwarp -of netcdf -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) + " " + str(lat2) +
             " -ts " + str(width1) + " " + str(height1) + " " + inname1 + " " + outname1)
    print(jono1)
    os.system(jono1)
    return


def interpolate_cache(x_min, y_min, x_max, y_max, reso1):
    global cache_lons1
    global cache_lats1
    global cache_x
    global cache_y
    global cache_z
    global cache_x2
    global cache_y2
    global cache_z2

    cache_x2 = []
    cache_y2 = []
    cache_z2 = []
    cache_lons1 = []
    cache_lats1 = []

    pitu1 = len(cache_z)
    raja1 = pitu1

    for i in range(0, raja1):
        try:
            xx = cache_x[i]
            yy = cache_y[i]
            zz = cache_z[i]
            if str(zz) == '--':  # skip masked values
                raja1 = raja1 - 1
            else:
                cache_x2.append(xx)
                cache_y2.append(yy)
                cache_z2.append(zz)
        except IndexError:
            print("BRK")
            break

    lonsn = int(x_max - x_min) / reso1
    latsn = int(y_max - y_min) / reso1
    cache_lons1 = np.linspace(x_min, x_max, lonsn)
    cache_lats1 = np.linspace(y_min, y_max, latsn)

    grid_x, grid_y = np.mgrid[x_min:x_max:reso1, y_min:y_max:reso1]
    rbfi = Rbf(cache_x2, cache_y2, cache_z2)
    di = rbfi(grid_x, grid_y)
    # plt.figure(figsize=(15, 15))
    # plt.imshow(di.T, origin="lower")
    # cb = plt.scatter(df.x, df.y, s=60, c=df.por, edgecolor='#ffffff66')
    # plt.colorbar(cb, shrink=0.67)
    # plt.show()
    return di.T


def makepoints(imgin1, lonsin1, latsin1):
    global cache_x
    global cache_y
    global cache_z
    cache_x = []
    cache_y = []
    cache_z = []
    dim1 = imgin1.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    k = 0
    for iy in range(0, nlat1):
        for ix in range(0, nlon1):
            zz = imgin1[iy, ix]
            cache_x.append(lonsin1[ix])
            cache_y.append(latsin1[iy])
            if str(zz) == '--':
                # warning: append 0 here to equalize the grid
                cache_z.append(0)
                k = 1
            else:
                cache_z.append(zz)
    # cache_z.append(imgin1[iy, ix])
    return 0


def gdal_reso(inname1, outname1, reso1):
    jono1 = "gdalwarp -tr " + str(reso1) + " " + str(reso1) + " " + inname1 + " " + outname1
    os.system(jono1)
    return 0


def gdal_cut_tonc(inname1, outname1, lon1, lon2, lat1, lat2):
    jono1 = ("gdalwarp -te " + str(lon1) + " " + str(lat1) + " " + str(lon2) + " " + str(lat2) +
             " " + inname1 + " " + outname1)
    print(jono1)
    os.system(jono1)
    return 0


def savenetcdf_single_frommem(outfilename1, outvarname1, xoutvalue1, xoutlats1, xoutlons1):
    nlat1 = len(xoutlats1)
    nlon1 = len(xoutlons1)
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    outlat1 = ncout1.createDimension('lat', nlat1)
    outlon1 = ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = xoutlats1
    outlons1[:] = xoutlons1
    outvalue1[:, :] = xoutvalue1[:]
    ncout1.close()
    return 0


def loadnetcdf_single_tomem(infilename1, invarname1):
    global cache_lons1
    global cache_lats1
    print(infilename1)
    inc1 = nc.Dataset(infilename1)
    inlatname1 = "lat"
    inlonname1 = "lon"
    inlats1 = inc1[inlatname1][:]
    inlons1 = inc1[inlonname1][:]
    cache_lons1 = inlons1
    cache_lats1 = inlats1
    indata1_set1 = inc1[invarname1][:]
    dim1 = indata1_set1.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    inc1.close()
    return indata1_set1


def loadnetcdf_single_tofile(infilename1, invarname1, outfilename1, outvarname1):
    inc1 = nc.Dataset(infilename1)
    inlatname1 = "lat"
    inlonname1 = "lon"
    inlats1 = inc1[inlatname1][:]
    inlons1 = inc1[inlonname1][:]
    indata1_set1 = inc1[invarname1][:]
    dim1 = indata1_set1.shape
    nlat1 = dim1[0]
    nlon1 = dim1[1]
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    outlat1 = ncout1.createDimension('lat', nlat1)
    outlon1 = ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = inlats1
    outlons1[:] = inlons1
    outvalue1[:, :] = indata1_set1[:]
    ncout1.close()
    return 0


def loadnetcdf_timed_tofile(infilename1, invarname1, intime1, outfilename1, outvarname1):
    inc1 = nc.Dataset(infilename1)
    inlatname1 = "lat"
    inlonname1 = "lon"
    inlats1 = inc1[inlatname1][:]
    inlons1 = inc1[inlonname1][:]
    indata1 = inc1[invarname1][:]
    dim1 = indata1.shape
    nlat1 = dim1[1]
    nlon1 = dim1[2]
    indata_set1 = indata1[intime1]
    # img01 = indata_set1
    # img1 = np.where(np.isnan(img01), 0, img01)
    ncout1 = nc.Dataset(outfilename1, 'w', format='NETCDF4')
    outlat1 = ncout1.createDimension('lat', nlat1)
    outlon1 = ncout1.createDimension('lon', nlon1)
    outlats1 = ncout1.createVariable('lat', 'f4', ('lat',))
    outlons1 = ncout1.createVariable('lon', 'f4', ('lon',))
    outvalue1 = ncout1.createVariable(outvarname1, 'f4', ('lat', 'lon',))
    outvalue1.units = 'Unknown'
    outlats1[:] = inlats1
    outlons1[:] = inlons1
    outvalue1[:, :] = indata_set1
    ncout1.close()
    return 0


# the downscaler function!
# old signature:
# def downscale_data_1(input_rasters, loadfiles, intime1, lon1, lon2, lat1, lat2, width1, height1, roto, areaname, sresultfilename1, sresultvarname1, infilenames1, outfilenames1, soutfilenames1, invarnames1, outvarnames1, soutvarnames1):

def downscale_data_1(input_rasters, loadfiles, intime1, lon1, lon2, lat1, lat2, width1, height1,
                     roto, areaname, sresultfilename1, sresultvarname1, infilenames1, invarnames1):

    global DS_method
    global DS_show
    global DS_input_log

    debug = 0

    # if input_rasters == 1:
    #     os.system("del *.nc")

    outfilenames1 = []
    outvarnames1 = []
    soutfilenames1 = []
    soutvarnames1 = []

    huba0 = len(infilenames1)
    for n in range(0, huba0):
        sandersson1 = invarnames1[n]
        outvarnames1.append(sandersson1)
        soutvarnames1.append(sandersson1)

    # big raster??
    outfilenames1.append("./" + areaname + "_" + invarnames1[0] + ".nc")

    if input_rasters == 1:
        # preprocess_small_timed_raster(infilenames1[0], invarnames1[0], intime1, outfilenames1[0], outvarnames1[0], lon1, lon2, lat1, lat2, roto)
        preprocess_small_single_raster(infilenames1[0], invarnames1[0], outfilenames1[0],
                                       outvarnames1[0], lon1, lon2, lat1, lat2, roto)

    # quit(-1)

    huba = len(infilenames1)
    for n in range(1, huba):
        ofnamee = "./" + areaname + "_" + outvarnames1[n] + "_" + str(n) + ".nc"
        sofnamee = "./" + areaname + "_" + outvarnames1[n] + "_" + str(n) + "_s.nc"
        print(ofnamee)
        print(sofnamee)
        outfilenames1.append(ofnamee)
        soutfilenames1.append(sofnamee)

        if input_rasters == 1:
            print("PP ", infilenames1[n])
            preprocess_big_raster(infilenames1[n], invarnames1[n], outfilenames1[0],
                                  outfilenames1[n], soutfilenames1[n-1], outvarnames1[n],
                                  lon1, lon2, lat1, lat2, width1, height1, roto)


    imgs = []
    pointsx = []
    pointsy = []
    pointsz = []
    mlats = []
    mlons = []

    spointsx = []
    spointsy = []
    spointsz = []
    simgs = []
    slats = []
    slons = []

    len1 = len(outfilenames1)

    for n in range(0, len1):
        imgs.append(loadnetcdf_single_tomem(outfilenames1[n], "Band1"))
        mlons.append(cache_lons1)
        mlats.append(cache_lats1)
        makepoints(imgs[n], mlons[n], mlats[n])
        pointsx.append(cache_x)
        pointsy.append(cache_y)
        pointsz.append(cache_z)

    len1 = len(soutfilenames1)

    for n in range(0, len1):
        simgs.append(loadnetcdf_single_tomem(soutfilenames1[n], "Band1"))
        slons.append(cache_lons1)
        slats.append(cache_lats1)
        makepoints(simgs[n], slons[n], slats[n])
        spointsx.append(cache_x)
        spointsy.append(cache_y)
        spointsz.append(cache_z)


    if debug == 1:
        print("Species habitation.")
        load_xy("humanlgm.txt", "Lon", "Lat")
        klats1 = cache_lats1
        klons1 = cache_lons1

        spointszout = []
        ppointsx = []
        ppointsy = []
        ppointsz = []

        gdal_get_points_from_file(outfilenames1[1], invarnames1[1], klons1, klats1)
        ppointsx.append(cache_x)
        ppointsy.append(cache_y)
        ppointsz.append(cache_z)

        gdal_get_points_from_file(outfilenames1[2], invarnames1[2], klons1, klats1)
        ppointsx.append(cache_x)
        ppointsy.append(cache_y)
        ppointsz.append(cache_z)

        # gdal_get_points_from_file(outfilenames1[2], invarnames1[2], klons1, klats1)
        # ppointsx.append(cache_x)
        # ppointsy.append(cache_y)
        # ppointsz.append(cache_z)

        bpoints1 = ppointsz[0]
        apoints1 = ppointsz[0]
        cpoints1 = spointsz[0]

        spointszout.append(probability_single_var(apoints1, bpoints1, cpoints1))

        bpoints1 = ppointsz[1]
        apoints1 = ppointsz[1]
        cpoints1 = spointsz[1]

        spointszout.append(probability_single_var(apoints1, bpoints1, cpoints1))

        # bpoints1 = ppointsz[2]
        # apoints1 = ppointsz[2]
        # cpoints1 = spointsz[2]
        # spointszout.append(probability_single_var(apoints1, bpoints1, cpoints1))

        odex1 = 0
        sdataout = spointszout[0] * spointszout[1]

        pointsxout1 = spointsx[0]
        pointsyout1 = spointsy[0]

        slonsout1 = slons[0]
        slatsout1 = slats[0]

        save_points_to_netcdf(sresultfilename1, sresultvarname1, slonsout1, slatsout1, sdataout)

        plt.imshow(np.array(sdataout).reshape(len(slatsout1), len(slonsout1)))
        plt.show()
        return 1

    # main section of ds

    sla = []

    len1 = len(pointsz)

    for n in range(1, len1):
        sla.append(pointsz[n])

    slb = []

    for n in range(0, len1 - 1):
        print(n)
        slb.append(spointsz[n])

    apoints1 = list(zip(*sla))
    bpoints1 = pointsz[0]
    cpoints1 = list(zip(*slb))

    spointszout = []

    if DS_input_log == 0:
        spointszout.append(random_forest_multiple_vars(apoints1, bpoints1, cpoints1))
        spointszout.append(gam_multiple_vars(apoints1, bpoints1, cpoints1))
        spointszout.append(linear_regression_multiple_vars(apoints1, bpoints1, cpoints1))

    if DS_input_log == 1:
        spointszout.append(np.exp(random_forest_multiple_vars(apoints1, np.log(bpoints1), cpoints1)))
        spointszout.append(np.exp(gam_multiple_vars(apoints1, np.log(bpoints1), cpoints1)))
        spointszout.append(np.exp(linear_regression_multiple_vars(apoints1, np.log(bpoints1), cpoints1)))

    # sdataout = average_tables(spointszout[0], spointszout[1])
    if DS_method == 0:
        sdataout = spointszout[0]
    if DS_method == 1:
        sdataout = spointszout[1]
    if DS_method == 2:
        sdataout = spointszout[2]


    if DS_method == 77:
        sdataout = average_tables(spointszout[0], spointszout[2])

    pointsxout1 = spointsx[0]
    pointsyout1 = spointsy[0]
    slonsout1 = slons[0]
    slatsout1 = slats[0]
    save_points_to_netcdf(sresultfilename1, sresultvarname1, slonsout1, slatsout1, sdataout)

    # plt.register_cmap(name='viridis', cmap=cmaps.viridis)
    # plt.set_cmap(cm.viridis)

    # cmaps = OrderedDict()
    # kolormap = 'jet'
    # kolormap = 'Spectral_r'
    # kolormap = 'gist_rainbow_r'
    kolormap = 'BrBG'
    # kolormap = 'rainbow'
    # kolormap = 'viridis'

    if DS_show == 1:
        plt.imshow(np.array(sdataout).reshape(len(slatsout1), len(slonsout1)), cmap=kolormap)
        plt.ylim(0, len(slatsout1))
        plt.show()

    return 0


# attempt to post-process rain in mountains!

def manipulate_rainfall_data(demname, rainname, oname2):
    # try to exaggerate rainfall in mountains
    dem1 = loadnetcdf_single_tomem(demname, "Band1")
    rain0 = loadnetcdf_single_tomem(rainname, "Rain")
    rain1 = np.flipud(rain0)
    shape1 = np.shape(rain1)
    print(shape1)

    dem2 = np.ravel(dem1)
    rain2 = np.ravel(rain1)
    len1 = len(rain2)
    # print(len1)
    outta1 = rain2 * 0
    limith1 = 800
    for n in range(0, len1):
        r = rain2[n]
        o = r
        d = dem2[n]
        rk = d / limith1
        if d > limith1:
            o = r * rk
        outta1[n] = o

    out00 = np.reshape(outta1, shape1)

    # out1 = np.flipud(out00)
    out1 = out00

    savenetcdf_single_frommem(oname2, "Rain", out1, cache_lats1, cache_lons1)
    return out1
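# example: under this rule a cell at 1600 m gets its rainfall doubled
# (1600/800 = 2), while cells at or below the 800 m threshold are unchanged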


def match_raster(iname, matchname, oname):
    loadnetcdf_single_tomem(iname, "Band1")
    lon1 = np.min(cache_lons1)
    lon2 = np.max(cache_lons1)
    lat1 = np.min(cache_lats1)
    lat2 = np.max(cache_lats1)
    gdal_cut_resize_fromnc_tonc(iname, matchname, oname, "Rain", lon1, lon2, lat1, lat2)




# main program


input_rasters=1
debug=0  # human habitation test

# acquire basic data: loadfiles=1

loadfiles=1

intime1=1

# lat1=30
# lat2=70
# lon1=-20
# lon2=80


lon1=-4.5
lon2=10.0
lat1=42.0
lat2=50.5

# post_ext<-c(-4.0,6.0,40.0,50.0)
# lon1=-4.0
# lon2=6.0
# lat1=40.0
# lat2=50.0

width1=130
height1=112

# jn
# width1=112
# height1=130


# JN WARNING
# width1=200
# height1=200


roto=1

areaname="france"

sresultfilename1="dskaled1.nc"
sresultvarname1="Rain"

infilenames1=[]
invarnames1=[]

infilenames1.append('./rain.nc')

infilenames1.append('./accu_vel_w.nc')
infilenames1.append('./accu_dem_1.nc')
infilenames1.append('./accu_dir.nc')
infilenames1.append('./accu_distance_1.nc')
infilenames1.append('./accu_lon_1.nc')
infilenames1.append('./accu_lat_1.nc')


# infilenames1.append('./daata/big_1.nc')

invarnames1.append("Rain") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1")

RF_estimators1=50
RF_features1=6

DS_method=0
DS_show=1
DS_input_log=0

downscale_data_1(input_rasters,loadfiles,intime1,lon1,lon2,lat1,lat2,width1,height1,roto,areaname,sresultfilename1,sresultvarname1,infilenames1, invarnames1)


print(".")

# quit(-1)


print("Second")

# second ds, optional!

width1=400
height1=400

roto=1

areaname="frances"

sresultfilename1="dskaled2.nc"
sresultvarname1="Rain"

infilenames1=[]
invarnames1=[]

infilenames1.append('./dskaled1.nc')
infilenames1.append('./saccu_dem_1.nc')
infilenames1.append('./saccu_distance_1.nc')
infilenames1.append('./saccu_lon_1.nc')
infilenames1.append('./saccu_lat_1.nc')
infilenames1.append('./saccu_aspect_1.nc')
infilenames1.append('./saccu_slope_1.nc')


# infilenames1.append('./daata/big_1.nc')

invarnames1.append("Rain") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1") invarnames1.append("Band1")


RF_estimators1=50
RF_features1=6

DS_method=1
DS_show=1
DS_input_log=0


downscale_data_1(input_rasters,loadfiles,intime1,lon1,lon2,lat1,lat2,width1,height1,roto,areaname,sresultfilename1,sresultvarname1,infilenames1, invarnames1)


iname="saccu_dem_1.nc" matchname="dskaled2.nc" oname="ddem1.nc" rainname="dskaled2.nc" oname2="dskaled3.nc"


match_raster(iname, matchname, oname)
put00=manipulate_rainfall_data(oname, rainname, oname2)

# plt.imshow(put00)
# plt.show()


print(".")




Licensing

I, the copyright holder of this work, hereby publish it under the following license:
This file is licensed under the Creative Commons Attribution-Share Alike 4.0 International license.
You are free:
  • to share – to copy, distribute and transmit the work
  • to remix – to adapt the work
Under the following conditions:
  • attribution – You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
  • share alike – If you remix, transform, or build upon the material, you must distribute your contributions under the same or compatible license as the original.

File history

Click on a date/time to view the file as it appeared at that time.

Date/Time: 14:14, 10 November 2021 (current version)
Dimensions: 1,616 × 1,072 (1.59 MB)
User: Merikanto (talk | contribs)
Comment: Uploaded own work with UploadWizard

There are no pages that use this file.