diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..12c21bd --- /dev/null +++ b/.gitignore @@ -0,0 +1,161 @@ +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/README.txt b/README.txt index 25339a9..e43d890 100644 --- a/README.txt +++ b/README.txt @@ -16,8 +16,6 @@ The new Doris version consists of 2 parts: contain algorithms specific to processing sentinel 1 images and support parallelisation of the processing of the bursts. The functionality of these scripts can be further extended to support more sensors and modes. -Note that the python code is developed in python 2.7, so be sure you are not using python 3. 
- In addition, you will find a stack preparation script, to automatically download the burst you need for your Area of Interest which you defined by a shape file, automatically download the SRTM DEM associated with this area, and setup your processing structure. @@ -65,7 +63,7 @@ Run the stack preparation script Move to the prepare_stack directory: cd prepare_stack Run the python script: -python prepare_datastack_main.py +python prepare_datastack.py This code will ask you to define the different folders you created before. The script will ask you whether you want to run your code in parallel. Generally, this is recommended as it speeds up your processing speed. Note that either the diff --git a/__init__.pyc b/__init__.pyc deleted file mode 100644 index 783020f..0000000 Binary files a/__init__.pyc and /dev/null differ diff --git a/bin/tsx_dump_header2doris.py b/bin/tsx_dump_header2doris.py index bb06314..daddc2b 100755 --- a/bin/tsx_dump_header2doris.py +++ b/bin/tsx_dump_header2doris.py @@ -12,16 +12,16 @@ #import types def usage(): - print '\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile' - print ' where tsx_XML_product is the input filename' + print ('\nUsage: python tsx_dump_header2doris.py tsx_XML_product > outputfile') + print (' where tsx_XML_product is the input filename') # print ' outputfile is the output DORIS resultfile' try: inputFileName = sys.argv[1] -# outputFileName = sys.argv[2] -# outStream = open(outputFileName,'w') + #outputFileName = sys.argv[2] + #outStream = open(outputFileName,'w') except: - print 'Unrecognized input' + print ('Unrecognized input') usage() sys.exit(1) @@ -127,22 +127,23 @@ def hms2sec(hmsString,convertFlag='int'): dummyVar = 'DUMMY' -# outStream.write('MASTER RESULTFILE: %s\n' % outputFileName) -# outStream.write('\n') -# outStream.write('\n') -# outStream.write('Start_process_control\n') -# outStream.write('readfiles: 1\n') -# outStream.write('precise_orbits: 0\n') -# outStream.write('crop: 0\n') -# 
outStream.write('sim_amplitude: 0\n') -# outStream.write('master_timing: 0\n') -# outStream.write('oversample: 0\n') -# outStream.write('resample: 0\n') -# outStream.write('filt_azi: 0\n') -# outStream.write('filt_range: 0\n') -# outStream.write('NOT_USED: 0\n') -# outStream.write('End_process_control\n') -# outStream.write('\n') +#print('MASTER RESULTFILE: %s\n' % outputFileName) +print('\n') +print('\n') +print('Start_process_control') +print('readfiles: 1') +print('precise_orbits: 0') +print('crop: 0') +print('sim_amplitude: 0') +print('master_timing: 0') +print('oversample: 0') +print('resample: 0') +print('filt_azi: 0') +print('filt_range: 0') +print('NOT_USED: 0') +print('End_process_control\n') +print('\n') + print('\ntsx_dump_header2doris.py v1.0, doris software, 2009\n') print('*******************************************************************') print('*_Start_readfiles:') diff --git a/doris_core/#referencephase.cc# b/doris_core/#referencephase.cc# new file mode 100755 index 0000000..1ca207f --- /dev/null +++ b/doris_core/#referencephase.cc# @@ -0,0 +1,2669 @@ +/* + * Copyright (c) 1999-2009 Delft University of Technology, The Netherlands + * + * This file is part of Doris, the Delft o-o radar interferometric software. + * + * Doris program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * Doris is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + * + * + */ +/**************************************************************** + * $Source: /users/kampes/DEVELOP/DORIS/doris/src/RCS/referencephase.cc,v $ * + * $Revision: 3.22 $ * + * $Date: 2006/05/18 11:09:20 $ * + * $Author: kampes $ * + * * + * -computation flat earth correction. * + * -computation radarcoding dem + interpolation to (1,1) grid * + ****************************************************************/ + +#include "matrixbk.hh" +#include "orbitbk.hh" +#include "slcimage.hh" // my slc image class +#include "productinfo.hh" // my 'products' class +#include "constants.hh"#include "referencephase.hh" // proto types +#include "ioroutines.hh" // error etc. +#include "utilities.hh" // isodd; ones(), etc. +#include "coregistration.hh" // distribute points +#include "exceptions.hh" // my exceptions class + +#include // setw only for test.. +#include // system +#include // max +#ifdef WIN32 + // Jia defined this. + // Bert Kampes, 24-Aug-2005 + #include "winsock2.h" +#else + #include // ntohl byteorder x86-HP unix +#endif + +// Using A Two-Dimensional Quality Mesh Generator and Delaunay Triangulator +// from Jonathan Richard Shewchuk +// Some definition for triangulate call +#define VOID int +#define REAL double +#define ANSI_DECLARATORS +#include "triangle.h" + + + +/**************************************************************** + * flatearth * + * * + * Compute polynomial model for 'flat earth' correction. * + * fie(l,p) = sumj=0:d sumk=0:d Ajk l^j p^k (NOT bert 8sept99) * + * precise orbits are used to compute delta range for Npoints * + * after which the polynomial model is fitted (LS). 
* + * * + * input: * + * - inputoptions * + * - info structs * + * - platform data points * + * output: * + * - void (result to file "scratchresflat") * + * - coefficients normalized wrt. original window of master * + * * + * Bert Kampes, 09-Mar-1999 * + * Bert Kampes, 26-Oct-1999 normalization of coeff., * + * dump to logfile: var(unknowns) == diag(inv(AtA)) * + ****************************************************************/ +void flatearth( + const input_comprefpha &comprefphainput, + const input_ell &ellips, + const slcimage &master, + const slcimage &slave, + const productinfo &interferogram, + orbit &masterorbit, + orbit &slaveorbit) + { + TRACE_FUNCTION("flatearth (BK 26-Oct-1999)") + const int32 MAXITER = 10; + const real8 CRITERPOS = 1e-6; + const real8 CRITERTIM = 1e-10; + char dummyline[2*ONE27]; + + INFO << "FLATEARTH: MAXITER: " << MAXITER << "; " + << "CRITERPOS: " << CRITERPOS << " m; " + << "CRITERTIM: " << CRITERTIM << " s"; + INFO.print(); + + // ______ Normalization factors for polynomial ______ + const real8 minL = master.originalwindow.linelo; + const real8 maxL = master.originalwindow.linehi; + const real8 minP = master.originalwindow.pixlo; + const real8 maxP = master.originalwindow.pixhi; + INFO << "flatearth: polynomial normalized by factors: " + << minL << " " << maxL << " " << minP << " " << maxP + << " to [-2,2]"; + INFO.print(); + + // ______Handling of input______ + const real8 m_minpi4cdivlam = (-4.0*PI*SOL)/master.wavelength; + const real8 s_minpi4cdivlam = (-4.0*PI*SOL)/slave.wavelength; + DEBUG << "master wavelength = " << master.wavelength; + DEBUG.print(); + DEBUG << "slave wavelength = " << slave.wavelength; + DEBUG.print(); + const int32 DEGREE = comprefphainput.degree; + const int32 Nunk = Ncoeffs(DEGREE); // Number of unknowns + bool pointsrandom = true; + if (specified(comprefphainput.ifpositions)) + pointsrandom = false; // only use those points + + + + // ______ Distribute points wel distributed over win ______ + // 
______ or read from ascii file ______ + // ______(i,0): line, (i,1): pixel, (i,2) flagfromdisk______ + //matrix Position; + // [FvL] for correct folding of points outside overlap window when inserted by file + matrix Position; + const uint Npoints = comprefphainput.Npoints; + register int32 i,j,k,index; + + if (pointsrandom) // no filename specified + { + Position = distributepoints(Npoints,interferogram.win); + } + else // read from file + { + Position.resize(Npoints,3); + //ifstream ifpos(comprefphainput.ifpositions, ios::in); + ifstream ifpos; + openfstream(ifpos,comprefphainput.ifpositions); + bk_assert(ifpos,comprefphainput.ifpositions,__FILE__,__LINE__); + uint ll,pp; + for (i=0; i> ll >> pp; + //Position(i,0) = uint(ll); + //Position(i,1) = uint(pp); + //Position(i,2) = uint(1); // flag from file + // [FvL] + Position(i,0) = int(ll); + Position(i,1) = int(pp); + Position(i,2) = int(1); // flag from file + ifpos.getline(dummyline,2*ONE27,'\n'); // goto next line. + } + ifpos.close(); + + // ______ Check last point ivm. 
EOL after last position in file ______ + if (Position(Npoints-1,0) == Position(Npoints-2,0) && + Position(Npoints-1,1) == Position(Npoints-2,1)) + { + Position(Npoints-1,0) = uint(.5*(minL + maxL) + 27); // random + Position(Npoints-1,1) = uint(.5*(minP + maxP) + 37); // random + WARNING << "refpha: there should be no EOL after last point in file: " + << comprefphainput.ifpositions; + WARNING.print(); + } + + // ______ Check if points are in overlap ______ + // ______ no check for uniqueness of points ______ + } + + matrix y(Npoints,1); // observation + matrix y_h2ph(Npoints,1); // observation, h2ph factors, added by FvL + matrix A(Npoints,Nunk); // designmatrix + + // ______Check redundancy______ + if (Npoints < Nunk) + { + PRINT_ERROR("flatearth: Number of points is smaller than parameters solved for."); + throw(input_error); + } + + + + // ======Compute delta r for all points====== + for (i=0; iSP) then S is to the right of slant line, then B perp is positive. + cn r1 = Psat_master.min(P); + cn r2 = Psat_slave.min(P); + // real8 theta = Psat_master.angle(r1); // look angle + real8 theta = P.angle(r1); // incidence angle + real8 theta_slave = P.angle(r2); // incidence angle slave + real8 Bperp = (theta > theta_slave ) ? 
// sign ok + sqrt(sqr(B)-sqr(Bpar)) : + -sqrt(sqr(B)-sqr(Bpar)) ; + + y_h2ph(i,0) = Bperp/(m_trange*SOL*sin(theta)); + + // ____________________________________________________________________________________ + // _____________ End added part by FvL ________________________________________________ + //_____________________________________________________________________________________ + + // ______Set up system of equations______ + // ______Order unknowns: A00 A10 A01 A20 A11 A02 A30 A21 A12 A03 for degree=3______ + // ______ normalize data [-2,2] ______ + real8 posL = normalize(line,minL,maxL); + real8 posP = normalize(pixel,minP,maxP); + index = 0; + for (j=0; j<=DEGREE; j++) + { + for (k=0; k<=j; k++) + { + A(i,index) = pow(posL,real8(j-k)) * pow(posP,real8(k)); + index++; + } + } + } + + + // ======Compute polynomial for these phases (LS)====== + // ______Compute Normalmatrix, rghthandside______ + matrix N = matTxmat(A,A); + matrix rhs = matTxmat(A,y); + matrix rhs_h2ph = matTxmat(A,y_h2ph); // Added by FvL, same A matrix can be used + + + // ______Compute solution______ + matrix Qx_hat = N; + choles(Qx_hat); // Cholesky factorisation normalmatrix + solvechol(Qx_hat,rhs); // Estimate of unknowns in rhs + solvechol(Qx_hat,rhs_h2ph); // Estimate of unknowns in rhs_h2ph, added by FvL + invertchol(Qx_hat); // Covariance matrix + + + // ______Test inverse______ + for (i=0; i .001) + { + WARNING << "Deviation quite large. Decrease degree or number of points?"; + WARNING.print(); + } + else + { + INFO.print("Deviation is OK."); + } + + // ______Some other stuff, scale is ok______ + // matrix Qy_hat = A * (matxmatT(Qx_hat,A)); + matrix y_hat = A * rhs; + matrix y_hat_h2ph = A * rhs_h2ph; // added by FvL + matrix e_hat = y - y_hat; + matrix e_hat_h2ph = y_h2ph - y_hat_h2ph; // added by FvL + + + // ______Overall model test (variance factor)______ + // ... ? + + + + + + // ______ Wrap offset ______ + // BK 30/9/99 do not do this, later absolute ref. phase is used. 
+ // in s2h rodriguez for example. + // it does not change anything for compinterfero etc. + // rhs(0,0) = remainder(rhs(0,0),2*PI); + + + + // ______Write results to file______ + ofstream scratchlogfile("scratchlogflat", ios::out | ios::trunc); + bk_assert(scratchlogfile,"flatearth: scratchlogflat",__FILE__,__LINE__); + + scratchlogfile << "\n\n*******************************************************************" + << "\n* FLATEARTH: " + //<< "\n*_Start_" << processcontrol[pr_i_comprefpha] + << "\n*******************************************************************" + << "\nDegree_flat:\t" << DEGREE + << "\nEstimated coefficients:\n" + << "\nx_hat \tstd:\n"; + for (i=0; i= lat0file)// largest latitude at first line of file + { + ERROR << "master crop outside DEM: most North latitude: " << rad2deg(lat0file) + << " [deg]; master crop requires: " << rad2deg(phimax) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + //throw(some_error); + } + if (lambdamax <= lon0file) + { + ERROR << "master crop outside DEM: most West longitude: " << rad2deg(lon0file) + << " [deg]; master crop window requires: " << rad2deg(lambdamax) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + //throw(some_error); + } + if (lambdamin >= lonNfile) + { + ERROR << "master crop outside DEM: most East longitude: " << rad2deg(lonNfile) + << " [deg]; master crop window requires: " << rad2deg(lambdamin) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + //throw(some_error); + } + + + //=================================================================== + //============ First loop: radarcode DEM ============================ + //============ (DEM geometry) ============================ + //=================================================================== + + int32 numvalid = 0;// number of good values, not NODATA in buffer + int32 numNODATA = 0;// number of NODATA values in buffer + real8 meancroppedDEM = 0.0;// to detect byte order problems, formats + real8 min_input_dem = 100000.0;// stats + real8 max_input_dem 
= -100000.0;// stats + + // ______ Compute buffer size radarcoding DEM______ + const real8 BUFFERMEMSIZE = generalinput.memory;// Bytes + int32 NcolsDEM = indexlambdaNDEM-indexlambda0DEM+1; + int32 NrowsDEM = indexphiNDEM-indexphi0DEM+1; + const uint32 NcolsDEMlog = NcolsDEM; // since NcolsDEM updated after getcorners() call. + const uint32 NrowsDEMlog = NrowsDEM; + const real8 Nrows_possible_DEM = BUFFERMEMSIZE / (5*8*NcolsDEM); + int32 bufferlines = int32(ceil(Nrows_possible_DEM)); // [MA] checked ok. Sinces SLC is not multilooked, see comprefdem for solution + if (bufferlines>NrowsDEM) bufferlines=NrowsDEM; + int32 numfullbuffers = NrowsDEM / bufferlines; + int32 restlines = NrowsDEM % bufferlines; + int32 extrabuffer = (restlines == 0) ? 0 : 1; + + // ______ Extra info ______ + INFO << "DEM output total pixels: " << NcolsDEM; + INFO.print(); + INFO << "DEM output total lines : " << NrowsDEM; + INFO.print(); + INFO << "Radar coding of DEM in: " << numfullbuffers << " buffers of " + << bufferlines << " lines and " << extrabuffer << " extra buffer of " + << restlines << " lines."; + INFO.print(); + + + + // ______ Open (temporary) output files ______ + // DEM heights + ofstream demofile; + openfstream(demofile,demassistinput.fodem,generalinput.overwrit); // dem_crop radarcoded + bk_assert(demofile,demassistinput.fodem,__FILE__,__LINE__); + + // master line coordinates of DEM + ofstream masterdemlineoutfile("dac_m_demline.temp", ios::out | ios::trunc); + bk_assert(masterdemlineoutfile,"dac_m_demline.temp",__FILE__,__LINE__); + + // master pixel coordinates of DEM + ofstream masterdempixeloutfile("dac_m_dempixel.temp", ios::out | ios::trunc); + bk_assert(masterdempixeloutfile,"dac_m_dempixel.temp",__FILE__,__LINE__); + + // delta line coordinates of DEM ( slave-master ) + ofstream deltademlineoutfile("dac_delta_demline.temp", ios::out | ios::trunc); + bk_assert(deltademlineoutfile,"dac_delta_demline.temp",__FILE__,__LINE__); + + // delta pixel coordinates of DEM + 
ofstream deltadempixeloutfile("dac_delta_dempixel.temp", ios::out | ios::trunc); + bk_assert(deltadempixeloutfile,"dac_delta_dempixel.temp",__FILE__,__LINE__); + + + + // ______ DEM loop per buffer ______ + register int32 j,i;// DEM index grid counter, register j first to ensure allocation + for (register int32 buffer=0; buffer DEM(blines,NcolsDEM); + + // ______ Extra info ______ + PROGRESS << STEP << "Buffer# [l0:lN, p0:pN]: " << buffer+1 << " [" + << indexphi0BUFFER << ": " << indexphiNBUFFER << ", " + << indexlambda0DEM << ": " << indexlambdaNDEM << "]"; + PROGRESS.print(); + + // ______ lat/lon for first pixel in matrix read from file ______ + // ______ upper is max. latitude, left is min. longitude ______ + const real8 upperleftphi = lat0file-indexphi0BUFFER*DEMdeltalat; + const real8 upperleftlambda = lon0file+indexlambda0DEM*DEMdeltalon; + + window zerooffset (0,0,0,0); + window winfromfile (indexphi0BUFFER,indexphiNBUFFER, + indexlambda0DEM,indexlambdaNDEM); + + // ______ Read in grdfile of DEM in matrix R4 (raw data, no header) _______ + // ______ added formats (BK 4-May-2001) ______ + PROGRESS << STEP << "Reading crop of DEM for buffer: " << buffer+1; + PROGRESS.print(); + DEBUG.print("Reading input DEM into real4 matrix (buffer)."); + switch (demassistinput.iformatflag) + { + // ______ Read as short BE, then convert to host order ______ + case FORMATI2_BIGENDIAN: + { + matrix DEMi2(blines,NcolsDEM); + readfile(DEMi2,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMi2(blines,NcolsDEM); + readfile(DEMi2,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMr8(blines,NcolsDEM); + readfile(DEMr8,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iiimax_dem_buffer) max_dem_buffer=DEM(i,j);// stats + } + else + { + numNODATA++; + } + }//loop dem for stats + }//loop dem for stats + min_input_dem = min(min_input_dem,min_dem_buffer);//global 
stats + max_input_dem = max(max_input_dem,max_dem_buffer);//global stats + + + // ====== Radarcoding DEM ============================== + // ______ DEM contains values from leftupper with ______ + // ______ spacing (DEMdeltalat,DEMdeltalon) ______ + // ______ Transform DEM to l,p,refphase ______ + PROGRESS.print("Converting DEM to radar system for this buffer."); + const int32 NpointsDEM = DEM.size(); + const int32 NpixelsDEM = DEM.pixels(); + // ______ Extra info ______ + INFO << "Number of points in DEM: " + << NpointsDEM; + INFO.print(); + + matrix masterDEMline(DEM.lines(),DEM.pixels()); + matrix masterDEMpixel(DEM.lines(),DEM.pixels()); + matrix deltaDEMline(DEM.lines(),DEM.pixels()); + matrix deltaDEMpixel(DEM.lines(),DEM.pixels()); + + // --- Loop DEM --- + real8 phi,lambda,height,m_l,m_p,s_l,s_p; + + + phi = upperleftphi; + for (i=0; i Nlinesml) bufferlines=Nlinesml; + numfullbuffers = Nlinesml / bufferlines; + restlines = Nlinesml % bufferlines; // the number of lines in extra buffer + extrabuffer = (restlines == 0) ? 
0 : 1; + + // ______ Extra info ______ + INFO << "Interpolation in: " << numfullbuffers << " buffers of " + << bufferlines << " lines and " << extrabuffer << " extra buffer of " + << restlines << " lines."; + INFO.print(); + + + // ______ Open output files ______ + ofstream deltalineofile("dac_delta_line.raw", ios::out | ios::trunc); + bk_assert(deltalineofile,"dac_delta_line.raw",__FILE__,__LINE__); + + ofstream deltapixelofile("dac_delta_pixel.raw", ios::out | ios::trunc); + bk_assert(deltapixelofile,"dac_delta_pixel.raw",__FILE__,__LINE__); + + // if request for height in radar coordinates l,p + ofstream refdemheiofile; + if (outputrefdemhei==true) + { + openfstream(refdemheiofile,demassistinput.forefdemhei,generalinput.overwrit); + bk_assert(refdemheiofile,demassistinput.forefdemhei,__FILE__,__LINE__); + } + + // ______ interpolation loop per buffer ______ + for (register int32 buffer = 0; buffer < numfullbuffers + extrabuffer; ++buffer) + { + + // Determine indices for buffer + const int32 blines = (buffer == numfullbuffers) ? 
restlines : bufferlines; + const real8 firstline_buffer = veryfirstline+buffer*bufferlines; + const real8 lastline_buffer = firstline_buffer+blines-1; + + // ______ Extra info ______ + PROGRESS << STEP << "Interpolation buffer: " << buffer+1 << "of" << numfullbuffers + extrabuffer << " [l0:lN, p0:pN]: " << " [" + << firstline_buffer << ": " << lastline_buffer << ", " + << firstpixel << ": " << lastpixel << "]"; + PROGRESS.print(); + + // Get corners of buffer + real8 phimin_az; + real8 phimax_az; + real8 lambdamin_az; + real8 lambdamax_az; + getcorners(firstline_buffer,lastline_buffer, + firstpixel,lastpixel, + extralat,extralong,phimax,lambdamin, + DEMdeltalat,DEMdeltalon,NrowsDEM,NcolsDEM, + ellips,master,masterorbit,phimin_az,phimax_az,lambdamin_az,lambdamax, + indexphi0DEM,indexphiNDEM,indexlambda0DEM,indexlambdaNDEM); + + window zerooffset (0,0,0,0); + window winfromfile (indexphi0DEM,indexphiNDEM, + indexlambda0DEM,indexlambdaNDEM); + const int32 NrowsDEM_buffer = indexphiNDEM-indexphi0DEM+1; + const int32 NcolsDEM_buffer = indexlambdaNDEM-indexlambda0DEM+1; + + PROGRESS << STEP << "Reading input for interpolation buffer: " << buffer+1 << "of" << numfullbuffers + extrabuffer; + PROGRESS.print(); + + // read x,y + matrix DEMline_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + matrix DEMpixel_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + + readfile(DEMline_buffer,"dac_m_demline.temp",NrowsDEM,winfromfile,zerooffset); + readfile(DEMpixel_buffer,"dac_m_dempixel.temp",NrowsDEM,winfromfile,zerooffset); + + // read z (multiple, number can easily be increased, e.g. 
simulated intensity) + int32 Nz = 2; //number of z + matrix input_buffer(NrowsDEM_buffer *Nz ,NcolsDEM_buffer); + matrix temp_input_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + if (outputrefdemhei==true) + { + Nz += 1; + input_buffer.resize(NrowsDEM_buffer *Nz ,NcolsDEM_buffer); + } + + readfile(temp_input_buffer,"dac_delta_demline.temp",NrowsDEM,winfromfile,zerooffset); + input_buffer.setdata(0, 0, temp_input_buffer); + readfile(temp_input_buffer,"dac_delta_dempixel.temp",NrowsDEM,winfromfile,zerooffset); + input_buffer.setdata(NrowsDEM_buffer, 0, temp_input_buffer); + Nz = 2; + if (outputrefdemhei==true) + { + Nz += 1; + /// i would like to use real4, test later on + matrix dem_input(NrowsDEM_buffer,NcolsDEM_buffer); + readfile(dem_input,demassistinput.fodem,NrowsDEM,winfromfile,zerooffset); + for (register int32 i =0 ; i < NrowsDEM_buffer ; i ++) + for(register int32 j = 0; j < NcolsDEM_buffer; j++) + temp_input_buffer(i,j) = real8(dem_input(i,j)); + input_buffer.setdata(NrowsDEM_buffer * (Nz-1), 0, temp_input_buffer); + } + + // initialize output array + Nz = 2; + matrix output_buffer(blines * Nz, Npixelsml); + if (outputrefdemhei==true) + { + Nz += 1; + output_buffer.resize(blines * Nz, Npixelsml); + } + + // interpolation + griddatalinear(DEMline_buffer,DEMpixel_buffer,input_buffer, + firstline_buffer,lastline_buffer,firstpixel,lastpixel, + 1,1,r_az_ratio,0,NODATA,output_buffer); + + deltalineofile << output_buffer(window(0, blines - 1, 0, Npixelsml -1 )); + deltapixelofile << output_buffer(window(blines , 2 * blines - 1, 0, Npixelsml -1 )); + Nz = 2; + if (outputrefdemhei==true) + { + Nz += 1; + refdemheiofile << output_buffer(window((Nz-1) * blines,Nz * blines - 1, 0, Npixelsml -1 )); + } + + DEMline_buffer.resize(1,1); + DEMpixel_buffer.resize(1,1); + input_buffer.resize(1,1); + temp_input_buffer.resize(1,1); + output_buffer.resize(1,1); + + } // end loop azimuth direction + + INFO << "Closing output files"; + INFO.print(); + + deltalineofile.close(); + 
deltapixelofile.close(); + if (outputrefdemhei==true) // Radarcoded DEM + refdemheiofile.close(); + + //=================================================================== + //============ End second loop: interpolation ============= + //============ (radar geometry) ============= + //=================================================================== + + + //=================================================================== + //============ Determine inverse transformation ============= + //============ (slave corners only, needed for overlap) ============= + //=================================================================== + + real8 line, pixel; + real8 deltaline_slave00,deltapixel_slave00, + deltaline_slave0N,deltapixel_slave0N, + deltaline_slaveN0,deltapixel_slaveN0, + deltaline_slaveNN,deltapixel_slaveNN; + real8 phimin_az,phimax_az,lambdamin_az,lambdamax_az; + + + for (register int16 corner = 0 ; corner < 4 ; corner ++) + { + + PROGRESS << "Radarcoding slave corner: " << corner+1; + PROGRESS.print(); + + switch (corner) + { + case 0: + { + line=slave.currentwindow.linelo; + pixel=slave.currentwindow.pixlo; + break; + } + case 1: + { + line=slave.currentwindow.linelo; + pixel=slave.currentwindow.pixhi; + break; + } + case 2: + { + line=slave.currentwindow.linehi; + pixel=slave.currentwindow.pixlo; + break; + } + case 3: + { + line=slave.currentwindow.linehi; + pixel=slave.currentwindow.pixhi; + break; + } + default: + PRINT_ERROR("totally impossible, checked input."); + + } + + //use getcorners with line,line,pixel,pixel for single point + getcorners(line,line,pixel,pixel, + extralat,extralong,lat0file,lon0file, + DEMdeltalat,DEMdeltalon,numberoflatpixels,numberoflonpixels, + ellips,slave,slaveorbit, + phimin_az,phimax_az,lambdamin_az,lambdamax, + indexphi0DEM,indexphiNDEM,indexlambda0DEM,indexlambdaNDEM); + + + NcolsDEM = indexlambdaNDEM-indexlambda0DEM+1; + NrowsDEM = indexphiNDEM-indexphi0DEM+1; + const real8 upperleftphi = 
lat0file-indexphi0DEM*DEMdeltalat; + const real8 upperleftlambda = lon0file+indexlambda0DEM*DEMdeltalon; + + window zerooffset (0,0,0,0); + window winfromfile (indexphi0DEM,indexphiNDEM, + indexlambda0DEM,indexlambdaNDEM); + + + // ______ Read in DEM in matrix R4 (raw data, no header) _______ + + matrix DEM(NrowsDEM,NcolsDEM); + + switch (demassistinput.iformatflag) + { + // ______ Read as short BE, then convert to host order ______ + case FORMATI2_BIGENDIAN: + { + matrix DEMi2(NrowsDEM,NcolsDEM); + readfile(DEMi2,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMi2(NrowsDEM,NcolsDEM); + readfile(DEMi2,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMr8(NrowsDEM,NcolsDEM); + readfile(DEMr8,demassistinput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii slaveDEMline(DEM.lines(),DEM.pixels()); + matrix slaveDEMpixel(DEM.lines(),DEM.pixels()); + matrix deltaDEMline(DEM.lines(),DEM.pixels()); + matrix deltaDEMpixel(DEM.lines(),DEM.pixels()); + + // --- Loop DEM --- + real8 phi,lambda,height,m_l,m_p,s_l,s_p; + + + phi = upperleftphi; + for (i=0; i input_buffer(DEM.lines()*2,DEM.pixels()); + input_buffer.setdata(0, 0, deltaDEMline); + input_buffer.setdata(DEM.lines(), 0, deltaDEMpixel); + + matrix output_buffer(2,1); + + griddatalinear(slaveDEMline,slaveDEMpixel,input_buffer, + line,line,pixel,pixel, + 1,1,r_az_ratio,0,NODATA,output_buffer); + + switch (corner) + { + case 0: + { + deltaline_slave00 = output_buffer(0,0); + deltapixel_slave00 = output_buffer(1,0); + INFO << "Deltaline_slave00: " << deltaline_slave00; + INFO.print(); + INFO << "Deltapixel_slave00: " << deltapixel_slave00; + INFO.print(); + break; + } + case 1: + { + deltaline_slave0N = output_buffer(0,0); + deltapixel_slave0N = output_buffer(1,0); + INFO << "Deltaline_slave0N: " << deltaline_slave0N; + INFO.print(); + INFO << "Deltapixel_slave0N: " << deltapixel_slave0N; + INFO.print(); + 
break; + } + case 2: + { + deltaline_slaveN0 = output_buffer(0,0); + deltapixel_slaveN0 = output_buffer(1,0); + INFO << "Deltaline_slaveN0: " << deltaline_slaveN0; + INFO.print(); + INFO << "Deltapixel_slaveN0: " << deltapixel_slaveN0; + INFO.print(); + break; + } + case 3: + { + deltaline_slaveNN = output_buffer(0,0); + deltapixel_slaveNN = output_buffer(1,0); + INFO << "Deltaline_slaveNN: " << deltaline_slaveNN; + INFO.print(); + INFO << "Deltapixel_slaveNN: " << deltapixel_slaveNN; + INFO.print(); + break; + } + default: + PRINT_ERROR("totally impossible, checked input."); + + } + + } + + //=================================================================== + //============ End determine inverse transformation ============= + //============ (slave corners only, needed for overlap) ============= + //=================================================================== + + + // ====== Write output information ====== + char croppeddemi[4*ONE27]; + strcpy(croppeddemi,"NO output requested"); + if (outputdemi) strcpy(croppeddemi,demassistinput.fodemi); + INFO << "Min. value of input DEM covering master: " << min_input_dem; + INFO.print(); + INFO << "Max. 
value of input DEM covering master: " << max_input_dem; + INFO.print(); + + ofstream scratchlogfile("scratchlogdemassist", ios::out | ios::trunc); + bk_assert(scratchlogfile,"demassist: scratchlogdemassist",__FILE__,__LINE__); + scratchlogfile + << "\n*******************************************************************" + << "\n* " << processcontrol[pr_i_demassist] + << "\n*******************************************************************" + << "\n1) DEM source file: \t" << demassistinput.firefdem + << "\nFormat: \t"; + switch (demassistinput.iformatflag) + { + case FORMATI2: + { + scratchlogfile << "SHORT SIGNED INTEGER (HOST ENDIANNESS)"; + break; + } + case FORMATI2_BIGENDIAN: + { + scratchlogfile << "SHORT SIGNED INTEGER, BIG ENDIAN"; + break; + } + case FORMATR4: + { + scratchlogfile << "REAL4 SIGNED FLOAT"; + break; + } + case FORMATR8: + { + scratchlogfile << "REAL8 SIGNED DOUBLE"; + break; + } + default: + { + scratchlogfile << "UNKNOWN? IMPOSSIBLE..."; + break; + } + } + scratchlogfile + << "\nByte order: \t" << "check it yourself..." + << "\nNumber of lines: \t" << numberoflatpixels + << "\nNumber of pixels: \t" << numberoflonpixels + << "\nResolution latitude: \t" << rad2deg(DEMdeltalat) << " [deg]" + << "\nResolution longitude: \t" << rad2deg(DEMdeltalon) << " [deg]" + << "\nMost West point in input DEM: \t" << rad2deg(lon0file) + << "\nMost East point in input DEM: \t" << rad2deg(lonNfile) + << "\nMost South point in input DEM: \t" << rad2deg(latNfile) + << "\nMost North point in input DEM: \t" << rad2deg(lat0file) + << "\nMin. value of input DEM covering master: " << min_input_dem + << "\nMax. 
value of input DEM covering master: " << max_input_dem + << "\n2) Output file cropped DEM: \t" << demassistinput.fodem //[FVL + << "\nFormat: \t" << "REAL4" + << "\nByte order: \t" << "(same as host)" + << "\nNumber of lines : \t" << NrowsDEMlog + << "\nNumber of pixels : \t" << NcolsDEMlog + << "\nDEM extend w/e/s/n : \t" << rad2deg(lambdamin) << "/" + << rad2deg(lambdamax) << "/" << rad2deg(phimin) << "/" << rad2deg(phimax) +// << "\nMean value: \t" << meancroppedDEM + << "\n3) Output file interpolated crop DEM: \t" << croppeddemi + << "\nFormat: \t" << "REAL4" + << "\nByte order: \t" << "(same as host)" + << "\nNumber of lines (multilooked): \t" << Nlinesml + << "\nNumber of pixels (multilooked): \t" << Npixelsml + << "\nDeltaline_slave00_dem: \t" << deltaline_slave00 + << "\nDeltapixel_slave00_dem: \t" << deltapixel_slave00 + << "\nDeltaline_slave0N_dem: \t" << deltaline_slave0N + << "\nDeltapixel_slave0N_dem: \t" << deltapixel_slave0N + << "\nDeltaline_slaveN0_dem: \t" << deltaline_slaveN0 + << "\nDeltapixel_slaveN0_dem: \t" << deltapixel_slaveN0 + << "\nDeltaline_slaveNN_dem: \t" << deltaline_slaveNN + << "\nDeltapixel_slaveNN_dem: \t" << deltapixel_slaveNN + << "\n*******************************************************************\n\n"; + scratchlogfile.close(); + + + ofstream scratchresfile("scratchresdemassist", ios::out | ios::trunc); + bk_assert(scratchresfile,"demassist: scratchresdemassist",__FILE__,__LINE__); + scratchresfile + << "\n\n*******************************************************************" + << "\n*_Start_" << processcontrol[pr_i_demassist] + << "\n*******************************************************************"; + scratchresfile + << "\nDEM source file: \t" << demassistinput.firefdem + << "\nMin. of input DEM: \t" << min_input_dem + << "\nMax. of input DEM: \t" << max_input_dem + << "\nFirst_line (w.r.t. original_master): \t" + << master.currentwindow.linelo + << "\nLast_line (w.r.t. 
original_master): \t" + << master.currentwindow.linehi + << "\nFirst_pixel (w.r.t. original_master): \t" + << master.currentwindow.pixlo + << "\nLast_pixel (w.r.t. original_master): \t" + << master.currentwindow.pixhi + << "\nNumber of lines: \t" << Nlinesml + << "\nNumber of pixels: \t" << Npixelsml + << "\nDeltaline_slave00_dem: \t" << deltaline_slave00 + << "\nDeltapixel_slave00_dem: \t" << deltapixel_slave00 + << "\nDeltaline_slave0N_dem: \t" << deltaline_slave0N + << "\nDeltapixel_slave0N_dem: \t" << deltapixel_slave0N + << "\nDeltaline_slaveN0_dem: \t" << deltaline_slaveN0 + << "\nDeltapixel_slaveN0_dem: \t" << deltapixel_slaveN0 + << "\nDeltaline_slaveNN_dem: \t" << deltaline_slaveNN + << "\nDeltapixel_slaveNN_dem: \t" << deltapixel_slaveNN + << "\n*******************************************************************" + << "\n* End_" << processcontrol[pr_i_demassist] << "_NORMAL" + << "\n*******************************************************************\n"; + scratchresfile.close(); + + + } // END demassist + + +/**************************************************************** + * radarcodedem (a.k.a comprefdem) * + * * + * Compute reference phase based on DEM (SRTM) * + * DEM on equiangular grid (lat/lon) assumed * + * DEM seems stored from North to South * + * * + * Freek van Leijen, 26-Sep-2007 * + ****************************************************************/ +void radarcodedem( + const input_gen &generalinput, + const input_ell &ellips, + const input_comprefdem &refdeminput, + const slcimage &master, + const slcimage &slave, + const productinfo &interferogram, + orbit &masterorbit, + orbit &slaveorbit) + { + TRACE_FUNCTION("radarcodedem (FvL 26-Sep-2007)") + + const string STEP="CRD: "; + const int32 MAXITER = 10; + const real8 CRITERPOS = 1e-6; + const real8 CRITERTIM = 1e-10; + + const real8 lat0file = refdeminput.demlatleftupper; // first pix on disk w02090 + const real8 lon0file = refdeminput.demlonleftupper; // first pix on disk + const real8 
DEMdeltalat = refdeminput.demdeltalat; // in radians + const real8 DEMdeltalon = refdeminput.demdeltalon; // in radians + const int32 numberoflonpixels = refdeminput.demcols; // NCOLS on file + const int32 numberoflatpixels = refdeminput.demrows; // NROWS on file + const real8 NODATA = refdeminput.demnodata; // (BK 4 may 2001) + bool onlyrefphasetopo = !refdeminput.includerefpha; // true: phase DEM w.r.t. ellipsoid + const bool outputdemi = specified(refdeminput.fodemi); // if spec. then output + const bool outputh2ph = specified(refdeminput.foh2ph); // if spec. then output, added by FvL + const bool outputrefdemhei = specified(refdeminput.forefdemhei); + + // _____ start added by MA _____ + bool mlookedIFG = false; // true: ifg is multilooked + + int32 mlL = interferogram.multilookL; // initialize multilookfactor + int32 mlP = interferogram.multilookP; + const int32 &ifgmlL = interferogram.multilookL; // multilookfactor of interferogram + const int32 &ifgmlP = interferogram.multilookP; // multilookfactor of interferogram + if ( ifgmlL != 1 || ifgmlP != 1 ) // [MA] additional entry for Coherence comptation using refdem. + { // always do computation without multilooking + mlL = 1; // set multilookfactor for interpolation + mlP = 1; // set multilookfactor for interpolation + mlookedIFG = true; // dealing with mlooked ifg. + } + // _____ end added by MA _____ + + + + const real8 m_min4picdivlam = (-4.0*PI*SOL)/master.wavelength; + const real8 s_min4picdivlam = (-4.0*PI*SOL)/slave.wavelength; + DEBUG << "master wavelength = " << master.wavelength; + DEBUG.print(); + DEBUG << "slave wavelength = " << slave.wavelength; + DEBUG.print(); + + const real8 latNfile = lat0file-DEMdeltalat*(numberoflatpixels-1); // upper=max. lat value + const real8 lonNfile = lon0file+DEMdeltalon*(numberoflonpixels-1); // left=min. 
lon value + + // ______ Extra info ______ + INFO << "DEM input: w/e/s/n: \t" + << rad2deg(lon0file) << "/" << rad2deg(lonNfile) << "/" + << rad2deg(latNfile) << "/" << rad2deg(lat0file); + INFO.print(); + + // ______ Get corners of interferogram (approx) to select DEM ______ + // ______ in radians (if height were zero)______ + real8 extralat = (1.5*DEMdeltalat + deg2rad(4.0/25.0)); + real8 extralong = (1.5*DEMdeltalon + deg2rad(4.0/25.0)); + + real8 phimin; + real8 phimax; + real8 lambdamin; + real8 lambdamax; + int32 indexphi0DEM; + int32 indexphiNDEM; + int32 indexlambda0DEM; + int32 indexlambdaNDEM; + const uint &ifglinelo = interferogram.win.linelo; // [MA] win no-mlooked master coords + const uint &ifglinehi = interferogram.win.linehi; + const uint &ifgpixlo = interferogram.win.pixlo; + const uint &ifgpixhi = interferogram.win.pixhi; + + getcorners(ifglinelo,ifglinehi, + ifgpixlo,ifgpixhi, + extralat,extralong,lat0file,lon0file, + DEMdeltalat,DEMdeltalon,numberoflatpixels,numberoflonpixels, + ellips,master,masterorbit,phimin,phimax,lambdamin,lambdamax, + indexphi0DEM,indexphiNDEM,indexlambda0DEM,indexlambdaNDEM); + + // ______ Extra info ______ + INFO << "DEM input required: w/e/s/n: \t" + << rad2deg(lambdamin) << "/" << rad2deg(lambdamax) << "/" + << rad2deg(phimin) << "/" << rad2deg(phimax); + INFO.print(); + INFO << "For window (l0,lN,p0,pN): \t" + << ifglinelo << " " + << ifglinehi << " " + << ifgpixlo << " " + << ifgpixhi; + INFO.print(); + + + // ______ Check corners of DEM ______ + // check if DEM is appropriate for interferogram + // DEM should at least partially cover IFG + // note: phi is [90:-90] + if (phimax <= latNfile)// DEM is more north than IFG + { + ERROR << "IFG outside DEM: most South latitude: " << rad2deg(latNfile) + << " [deg]; IFG requires: " << rad2deg(phimax) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + throw(some_error); + } + // DEM is more south than IFG + if (phimin >= lat0file)// largest latitude at first line of file + { + 
ERROR << "IFG outside DEM: most North latitude: " << rad2deg(lat0file) + << " [deg]; IFG requires: " << rad2deg(phimax) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + throw(some_error); + } + if (lambdamax <= lon0file) + { + ERROR << "IFG outside DEM: most West longitude: " << rad2deg(lon0file) + << " [deg]; IFG window requires: " << rad2deg(lambdamax) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + throw(some_error); + } + if (lambdamin >= lonNfile) + { + ERROR << "IFG outside DEM: most East longitude: " << rad2deg(lonNfile) + << " [deg]; IFG window requires: " << rad2deg(lambdamin) + << " [deg]"; + PRINT_ERROR(ERROR.get_str()) + throw(some_error); + } + + + //=================================================================== + //============ First loop: radarcode DEM ============================ + //============ (DEM geometry) ============================ + //=================================================================== + + int32 numvalid = 0;// number of good values, not NODATA in buffer + int32 numNODATA = 0;// number of NODATA values in buffer + real8 meancroppedDEM = 0.0;// to detect byte order problems, formats + real8 min_input_dem = 100000.0;// stats + real8 max_input_dem = -100000.0;// stats + + // ______ Compute buffer size radarcoding DEM______ + const real8 BUFFERMEMSIZE = generalinput.memory;// Bytes + const int32 NcolsDEM = indexlambdaNDEM-indexlambda0DEM+1; + const int32 NrowsDEM = indexphiNDEM-indexphi0DEM+1; + const real8 Nrows_possible_DEM = BUFFERMEMSIZE / (5*8*NcolsDEM); + int32 bufferlines = int32(ceil(Nrows_possible_DEM)); + INFO << "Possible max. buffer lines: " << bufferlines << " for " << BUFFERMEMSIZE << " memory size."; + INFO.print(); + if (bufferlines>NrowsDEM) bufferlines=NrowsDEM; + int32 numfullbuffers = NrowsDEM / bufferlines; + int32 restlines = NrowsDEM % bufferlines; + int32 extrabuffer = (restlines == 0) ? 
0 : 1; + + // ______ Extra info ______ + INFO << "DEM output total pixels: " << NcolsDEM; + INFO.print(); + INFO << "DEM output total lines : " << NrowsDEM; + INFO.print(); + INFO << "Radar coding of DEM in: " << numfullbuffers << " buffers of " + << bufferlines << " lines and " << extrabuffer << " extra buffer of " + << restlines << " lines."; + INFO.print(); + + + + // ______ Open (temporary) output files ______ + // DEM heights + INFO< DEM(blines,NcolsDEM); + + // ______ Extra info ______ + PROGRESS << STEP << "Buffer# [l0:lN, p0:pN]: " << buffer+1 << " [" + << indexphi0BUFFER << ": " << indexphiNBUFFER << ", " + << indexlambda0DEM << ": " << indexlambdaNDEM << "]"; + PROGRESS.print(); + + // ______ lat/lon for first pixel in matrix read from file ______ + // ______ upper is max. latitude, left is min. longitude ______ + const real8 upperleftphi = lat0file-indexphi0BUFFER*DEMdeltalat; + const real8 upperleftlambda = lon0file+indexlambda0DEM*DEMdeltalon; + + window zerooffset (0,0,0,0); + window winfromfile (indexphi0BUFFER,indexphiNBUFFER, + indexlambda0DEM,indexlambdaNDEM); + + // ______ Read in grdfile of DEM in matrix R4 (raw data, no header) _______ + // ______ added formats (BK 4-May-2001) ______ + PROGRESS << STEP << "Reading crop of DEM for buffer: " << buffer+1; + PROGRESS.print(); + DEBUG.print("Reading input DEM into real4 matrix (buffer)."); + INFO<< "file info: name: " << refdeminput.firefdem <<", nof flat pixels, " << numberoflatpixels << endl; + INFO<< "file info, Format : " << refdeminput.iformatflag< DEMi2(blines,NcolsDEM); + readfile(DEMi2,refdeminput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMi2(blines,NcolsDEM); + readfile(DEMi2,refdeminput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iii DEMr8(blines,NcolsDEM); + readfile(DEMr8,refdeminput.firefdem,numberoflatpixels,winfromfile,zerooffset); + for (int32 iii=0; iiimax_dem_buffer) max_dem_buffer=DEM(i,j);// stats + } + else + { + 
numNODATA++; + } + }//loop dem for stats + }//loop dem for stats + min_input_dem = min(min_input_dem,min_dem_buffer);//global stats + max_input_dem = max(max_input_dem,max_dem_buffer);//global stats + + + // ====== Radarcoding DEM ============================== + // ______ DEM contains values from leftupper with ______ + // ______ spacing (DEMdeltalat,DEMdeltalon) ______ + // ______ Transform DEM to l,p,refphase ______ + PROGRESS.print("Converting DEM to radar system for this buffer."); + const int32 NpointsDEM = DEM.size(); + const int32 NpixelsDEM = DEM.pixels(); + // ______ Extra info ______ + INFO << "Number of points in DEM: " + << NpointsDEM; + INFO.print(); + + matrix masterDEMline(DEM.lines(),DEM.pixels()); + matrix masterDEMpixel(DEM.lines(),DEM.pixels()); + matrix ref_phase_array(DEM.lines(),DEM.pixels()); + matrix h2ph_array(DEM.lines(),DEM.pixels()); + + // --- Loop DEM --- + cn P; + real8 phi,lambda,height,l,p,ref_phase; + + + phi = upperleftphi; + for (i=0; i theta_slave ) ? // sign ok + sqrt(sqr(B)-sqr(Bpar)) : + -sqrt(sqr(B)-sqr(Bpar)) ; + + h2ph_array(i,j) = Bperp/(t_range_master*SOL*sin(theta)); +} + + + if (onlyrefphasetopo) // do not include flat earth phase + { + lp2xyz(l,p,ellips, // h==0 + master, masterorbit, + P,MAXITER,CRITERPOS); // P returned + + real8 t_range_flatearth,t_azi_dummy; + + xyz2t(t_azi_dummy,t_range_flatearth, + slave,slaveorbit, + P,MAXITER,CRITERTIM); // P on h=0 + ref_phase = s_min4picdivlam*t_range_flatearth- + s_min4picdivlam*t_range_slave; + } + else // include flatearth, ref.pha = phi_topo+phi_flatearth + { + ref_phase = + m_min4picdivlam*master.pix2tr(p)- + s_min4picdivlam*t_range_slave; + } + + ref_phase_array(i,j) = ref_phase; + + lambda += DEMdeltalon; + } // loop DEM pixels + + // ______ update latitude of next line ______ + phi -= DEMdeltalat; // upper left is max. 
value + } // loop DEM lines + + + // Write results to output files + PROGRESS << STEP << "Writing radar coded DEM to file, buffer: " << buffer+1 << " of " << numfullbuffers + extrabuffer ; + PROGRESS.print(); + + demofile << DEM; + masterdemlineoutfile << masterDEMline; + masterdempixeloutfile << masterDEMpixel; + demrefphaseoutfile << ref_phase_array; +if (outputh2ph==true) + demh2phoutfile << h2ph_array; + + masterDEMline.resize(1,1); //deallocate + masterDEMpixel.resize(1,1); //deallocate + DEM.resize(1,1); //deallocate + ref_phase_array(1,1); //deallocate + h2ph_array(1,1); //deallocate + } // buffer loop + + demofile.close(); + masterdemlineoutfile.close(); + masterdempixeloutfile.close(); + demrefphaseoutfile.close(); +if (outputh2ph==true) + demh2phoutfile.close(); + + + //=================================================================== + //============ End first loop: radarcode DEM ======================== + //============ (DEM geometry) ============================ + //=================================================================== + + + //=================================================================== + //============ Second loop: interpolation ============= + //============ (radar geometry) ============= + //=================================================================== + + INFO << STEP << "Start interpolation..."; + INFO.print(); + + // ______ Line/pixel of first point in original master coordinates ______ + // ______ maybe this should be changed to be x+(ml/2) ?? but depends on + // ______ definition of range_to_first_bin is to center or start.. 
+ // Bert Kampes, 08-Apr-2005: chose center by adding ml/2 + const int32 Nlinesml = interferogram.win.lines() / mlL; // ifg lines when mlL = 1 (no multilooking) + const int32 Npixelsml = interferogram.win.pixels() / mlP; + const int32 ifgNlinesml = interferogram.win.lines() / ifgmlL; // for the result file when mlL != 1 + const int32 ifgNpixelsml = interferogram.win.pixels() / ifgmlP; + const real8 offset = 0; + +//cerr << "xNFO: linesnoml: " << Nlinesml << " pixnoml: " << Npixelsml << endl; +//cerr << "xNFO: ifglinesml: " << ifgNlinesml << " ifgpixml: " << ifgNpixelsml << endl; + + const real8 veryfirstline = real8(ifglinelo) + (real8(mlL)-1.0)/2.0; + const real8 verylastline = veryfirstline + real8((Nlinesml-1)*mlL); + const real8 firstpixel = real8(ifgpixlo) + (real8(mlP)-1.0)/2.0; + const real8 lastpixel = firstpixel + real8((Npixelsml-1)*mlP); + +//cerr << "xNFO: vl0:vlN " << veryfirstline << ":" << verylastline << " p1:pN " << firstpixel << ":" << lastpixel << endl; + + //Determine range-azimuth spacing ratio, needed for proper triangulation + cn P1, P2 , P3, P4; + lp2xyz(veryfirstline,firstpixel,ellips,master,masterorbit, + P1,MAXITER,CRITERPOS); + lp2xyz(veryfirstline,lastpixel,ellips,master,masterorbit, + P2,MAXITER,CRITERPOS); + lp2xyz(verylastline,firstpixel,ellips,master,masterorbit, + P3,MAXITER,CRITERPOS); + lp2xyz(verylastline,lastpixel,ellips,master,masterorbit, + P4,MAXITER,CRITERPOS); + + const real8 r_spacing = ( (P1.min(P2)).norm() + (P3.min(P4)).norm() ) / 2 /(lastpixel - firstpixel) ; + const real8 az_spacing = ( (P1.min(P3)).norm() + (P2.min(P4)).norm() ) /2 /(verylastline - veryfirstline); + const real8 r_az_ratio = r_spacing/az_spacing; + + INFO << "Interferogram azimuth spacing: " << az_spacing; + INFO.print(); + INFO << "Interferogram range spacing: " << r_spacing; + INFO.print(); + INFO << "Range-azimuth spacing ratio: " << r_az_ratio; + INFO.print(); + + // ______ Compute buffer size interpolation______ + const real8 Nlinesml_possible = 
BUFFERMEMSIZE / (6*8*Npixelsml); + bufferlines = int32(ceil(Nlinesml_possible)); // initialized + + if ( mlookedIFG == true) // if ifg is multilooked by a factor + { + bufferlines = int32( floor(Nlinesml_possible/ifgmlL) * ifgmlL ); // [HB] Hermann provided the fix: + // Successive bufferlines must have a size which is multiple of multilooking factor + // unless data can fit completely to an initial single buffer. + // Extra buffer will scale correctly and rounding due to multilooking + // will yield correct number lines for the output file. + // Ex: floor(2097/25)*2 + floor(1578/25) = 229 lines (wrong) (bufferlines/mlL)*Nbuffers+extrabufferlines/mlL + } // Ex: floor(2075/25)*25*2 + floor(1622/25) = 230 lines (correct) + else // no-multilooking + { + bufferlines = int32( floor(Nlinesml_possible) ); // [MA] instead of ceil, prefered floor to use less mem + } + if (bufferlines > Nlinesml) bufferlines=Nlinesml; // if bufferlines > Nlines then shrink bufferlines to Nlines, no extra buffer requested. + INFO << "Possible max. buffer lines: " << bufferlines << " for " << BUFFERMEMSIZE << " memory size."; + INFO.print(); + numfullbuffers = Nlinesml / bufferlines; + restlines = Nlinesml % bufferlines; // the number of lines in extra buffer + extrabuffer = (restlines == 0) ? 
0 : 1; + + // ______ Extra info ______ + INFO << "Interpolation in: " << numfullbuffers << " buffers of " + << bufferlines << " lines and " << extrabuffer << " extra buffer of " + << restlines << " lines."; + INFO.print(); + +INFO << "OutputFile forefdem: "<< refdeminput.forefdem; +INFO.print(); + // ______ Open output files ______ + ofstream refdemofile; // refdem phase + openfstream(refdemofile,refdeminput.forefdem,generalinput.overwrit); + bk_assert(refdemofile,refdeminput.forefdem,__FILE__,__LINE__); + + // _____ start added by MA _____ + ofstream refdemofilenoML; // [MA] refdem phase no-multilooked + { // local scope practice + string fname = string(refdeminput.forefdem) + ".noML"; // new name as m_s_refdemphase.raw.noML + if ( mlookedIFG == true) // if ifg is multilooked by a factor + { + openfstream(refdemofilenoML,fname.c_str(),generalinput.overwrit); + bk_assert(refdemofilenoML,fname.c_str(),__FILE__,__LINE__); + } + else // no-multilooking + { + if(!remove(fname.c_str())) // when success report removed. + { + WARNING << "Removed existing " << fname << "file."; + WARNING.print(); + } + } + } + // _____ end added by MA _____ + + // if request for height in radar coordinates l,p + ofstream refdemheiofile;// Radarcoded DEM (Z.Perski) + if (outputrefdemhei==true) + { + INFO << "OutputFile forefdemhei: "<< refdeminput.forefdemhei; + INFO.print(); + + openfstream(refdemheiofile,refdeminput.forefdemhei,generalinput.overwrit); + bk_assert(refdemheiofile,refdeminput.forefdemhei,__FILE__,__LINE__); + } + + // if request for h2ph in radar coordinates l,p + ofstream h2phofile; + if (outputh2ph==true) + { + openfstream(h2phofile,refdeminput.foh2ph,generalinput.overwrit); + bk_assert(h2phofile,refdeminput.foh2ph,__FILE__,__LINE__); + } + + // ______ interpolation loop per buffer ______ + for (register int32 buffer = 0; buffer < numfullbuffers + extrabuffer; ++buffer) + { + + // Determine indices for buffer + const int32 blines = (buffer == numfullbuffers) ? 
restlines : bufferlines; + const real8 firstline_buffer = veryfirstline+buffer*bufferlines*mlL; + const real8 lastline_buffer = firstline_buffer+(blines-1)*mlL; + + // ______ Extra info ______ + PROGRESS << STEP << "Interpolation buffer: " << buffer+1 << "of" << numfullbuffers + extrabuffer << " [l0:lN, p0:pN]: " << " [" + << firstline_buffer << ": " << lastline_buffer << ", " + << firstpixel << ": " << lastpixel << "]"; + PROGRESS.print(); + + // Get corners of buffer + real8 phimin_az; + real8 phimax_az; + real8 lambdamin_az; + real8 lambdamax_az; + getcorners(firstline_buffer+offset,lastline_buffer+offset, + firstpixel+offset,lastpixel+offset, + extralat,extralong,phimax,lambdamin, + DEMdeltalat,DEMdeltalon,NrowsDEM,NcolsDEM, + ellips,master,masterorbit,phimin_az,phimax_az,lambdamin_az,lambdamax, + indexphi0DEM,indexphiNDEM,indexlambda0DEM,indexlambdaNDEM); + + window zerooffset (0,0,0,0); + window winfromfile (indexphi0DEM,indexphiNDEM, + indexlambda0DEM,indexlambdaNDEM); + const int32 NrowsDEM_buffer = indexphiNDEM-indexphi0DEM+1; + const int32 NcolsDEM_buffer = indexlambdaNDEM-indexlambda0DEM+1; + + PROGRESS << STEP << "Reading input for interpolation buffer: " << buffer+1 << "of" << numfullbuffers + extrabuffer; + PROGRESS.print(); + + // read x,y + matrix DEMline_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + matrix DEMpixel_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + + readfile(DEMline_buffer,"crd_m_demline.temp",NrowsDEM,winfromfile,zerooffset); + readfile(DEMpixel_buffer,"crd_m_dempixel.temp",NrowsDEM,winfromfile,zerooffset); + + // read z (multiple, number can easily be increased, e.g. 
simulated intensity) + int32 Nz = 1; //number of z + matrix input_buffer(NrowsDEM_buffer *Nz ,NcolsDEM_buffer); + matrix temp_input_buffer(NrowsDEM_buffer,NcolsDEM_buffer); + if (outputrefdemhei==true) + { + Nz += 1; + input_buffer.resize(NrowsDEM_buffer *Nz ,NcolsDEM_buffer); + } + if (outputh2ph==true) + { + Nz += 1; + input_buffer.resize(NrowsDEM_buffer *Nz ,NcolsDEM_buffer); + } + + readfile(temp_input_buffer,"crd_dem_refphase.temp",NrowsDEM,winfromfile,zerooffset); + input_buffer.setdata(0, 0, temp_input_buffer); + Nz = 1; + if (outputrefdemhei==true) + { + Nz += 1; + /// i would like to use real4, test later on + matrix dem_input(NrowsDEM_buffer,NcolsDEM_buffer); + readfile(dem_input,refdeminput.fodem,NrowsDEM,winfromfile,zerooffset); + for (register int32 i =0 ; i < NrowsDEM_buffer ; i ++) + for(register int32 j = 0; j < NcolsDEM_buffer; j++) + temp_input_buffer(i,j) = real8(dem_input(i,j)); + input_buffer.setdata(NrowsDEM_buffer * (Nz-1), 0, temp_input_buffer); + } + if (outputh2ph==true) + { + Nz += 1; + readfile(temp_input_buffer,"crd_dem_h2ph.temp",NrowsDEM,winfromfile,zerooffset); + input_buffer.setdata(NrowsDEM_buffer * (Nz-1) , 0, temp_input_buffer); + } + + // initialize output array + Nz = 1; + matrix output_buffer(blines * Nz, Npixelsml); + + if (outputrefdemhei==true) + { + Nz += 1; + output_buffer.resize(blines * Nz, Npixelsml); + } + if (outputh2ph==true) + { + Nz += 1; + output_buffer.resize(blines * Nz, Npixelsml); + } + + + // interpolation + griddatalinear(DEMline_buffer,DEMpixel_buffer,input_buffer, + firstline_buffer,lastline_buffer,firstpixel,lastpixel, + mlL,mlP,r_az_ratio,offset,NODATA,output_buffer); + + + //MA multilooking will start here. 
+ //MA cast all output files to type real4 + matrix output_layer(blines,Npixelsml); + + Nz = 1; // matrix 3rd dimension counter, 1 --> PHASE + + for (register int32 i =0 ; i < blines ; i++) + for(register int32 j = 0; j < Npixelsml; j++) + output_layer(i,j) = real4(output_buffer(i,j)); // real8 --> real4 + // convert_type(output_buffer,output_layer); // TODO MA, should replace above 3 lines but this one doesn't work properly yet + + cerr << "refphase: blines: " << blines << " Npixelsml " << Npixelsml << endl; + INFO << "size buffer: lines: " << output_layer.lines()<< ", " << output_layer.pixels() < real4 + //refdemheiofile << output_layer; // output reference dem heights + (mlookedIFG == true) ? refdemheiofile << multilook(output_layer, ifgmlL, ifgmlP) // [MA] + : refdemheiofile << output_layer ; + //refdemheiofile << output_buffer(window((Nz-1) * blines,Nz * blines - 1, 0, Npixelsml -1 )); + } + if (outputh2ph==true) + { + Nz += 1; + for (register int32 i = blines * (Nz-1) ; i < blines * Nz ; i++) + for(register int32 j = 0; j < Npixelsml; j++) + output_layer(i-(blines * (Nz-1)),j) = real4(output_buffer(i,j)); // real8 --> real4 + //h2phofile << output_layer; // output h2ph matrix + (mlookedIFG == true) ? h2phofile << multilook(output_layer, ifgmlL, ifgmlP) // [MA] + : h2phofile << output_layer; + //h2phofile << output_buffer(window((Nz-1) * blines,Nz * blines - 1, 0, Npixelsml -1 )); + } + + DEMline_buffer.resize(1,1); // deallocate + DEMpixel_buffer.resize(1,1); + input_buffer.resize(1,1); + temp_input_buffer.resize(1,1); + output_buffer.resize(1,1); + + } // end loop azimuth direction + + INFO << "Closing output files"; + INFO.print(); + + refdemofile.close(); // has the same multilook as interferrogram + if (mlookedIFG==true) + refdemofilenoML.close(); // [MA] if interferogram is mlooked then this is + // generated as non-multilooked for coherence estimation. 
+ if (outputrefdemhei==true) // For Zbigniew Perski + refdemheiofile.close(); + if (outputh2ph==true) + h2phofile.close(); + + //=================================================================== + //============ End second loop: interpolation ============= + //============ (radar geometry) ============= + //=================================================================== + + // === Clean up temporary outfiles === [MA] + + if (remove("crd_m_demline.temp")) // remove files + WARNING.print("code 101: could not remove file: crd_m_demline.temp."); + if (remove("crd_m_dempixel.temp")) + WARNING.print("code 101: could not remove file: crd_m_dempixel.temp."); + if (remove("crd_dem_refphase.temp")) + WARNING.print("code 101: could not remove file: crd_dem_refphase.temp."); + if ( outputh2ph==true && remove("crd_dem_h2ph.temp")) // output available + WARNING.print("code 101: could not remove file: crd_dem_h2ph.temp."); + + // === Clean up Done. === + + // ====== Write output information ====== + char croppeddemi[4*ONE27]; + strcpy(croppeddemi,"NO output requested"); + if (outputdemi) strcpy(croppeddemi,refdeminput.fodemi); + INFO << "Min. value of input DEM covering interferogram: " << min_input_dem; + INFO.print(); + INFO << "Max. 
value of input DEM covering interferogram: " << max_input_dem; + INFO.print(); + + ofstream scratchlogfile("scratchlogcomprefdem", ios::out | ios::trunc); + bk_assert(scratchlogfile,"comprefdem: scratchlogcomprefdem",__FILE__,__LINE__); + scratchlogfile + << "\n*******************************************************************" + << "\n* " << processcontrol[pr_i_comprefdem] + << "\n*******************************************************************" + << "\n1) DEM source file: \t" << refdeminput.firefdem + << "\nFormat: \t"; + switch (refdeminput.iformatflag) + { + case FORMATI2: + { + scratchlogfile << "SHORT SIGNED INTEGER (HOST ENDIANNESS)"; + break; + } + case FORMATI2_BIGENDIAN: + { + scratchlogfile << "SHORT SIGNED INTEGER, BIG ENDIAN"; + break; + } + case FORMATR4: + { + scratchlogfile << "REAL4 SIGNED FLOAT"; + break; + } + case FORMATR8: + { + scratchlogfile << "REAL8 SIGNED DOUBLE"; + break; + } + default: + { + scratchlogfile << "UNKNOWN? IMPOSSIBLE..."; + break; + } + } + scratchlogfile + << "\nByte order: \t" << "check it yourself..." + << "\nNumber of lines: \t" << numberoflatpixels + << "\nNumber of pixels: \t" << numberoflonpixels + << "\nResolution latitude: \t" << rad2deg(DEMdeltalat) << " [deg]" + << "\nResolution longitude: \t" << rad2deg(DEMdeltalon) << " [deg]" + << "\nMost West point in input DEM: \t" << rad2deg(lon0file) + << "\nMost East point in input DEM: \t" << rad2deg(lonNfile) + << "\nMost South point in input DEM: \t" << rad2deg(latNfile) + << "\nMost North point in input DEM: \t" << rad2deg(lat0file) + << "\nMin. value of input DEM covering interferogram: " << min_input_dem + << "\nMax. 
value of input DEM covering interferogram: " << max_input_dem + << "\n2) Output file cropped DEM: \t" << refdeminput.fodem //[FVL + << "\nFormat: \t" << "REAL4" + << "\nByte order: \t" << "(same as host)" + << "\nNumber of lines (multilooked): \t" << NcolsDEM + << "\nNumber of pixels (multilooked): \t" << NrowsDEM + << "\nDEM extend w/e/s/n : \t" << rad2deg(lambdamin) << "/" + << rad2deg(lambdamax) << "/" << rad2deg(phimin) << "/" << rad2deg(phimax) +// << "\nMean value: \t" << meancroppedDEM + << "\n3) Output file interpolated crop DEM: \t" << croppeddemi + << "\nFormat: \t" << "REAL4" + << "\nByte order: \t" << "(same as host)" + << "\n4) Output file synthetic phase: \t" << refdeminput.forefdem + << "\nFormat: \t" << "REAL4" + << "\nByte order: \t" << "(same as host)" + << "\nNumber of lines (multilooked): \t" << ifgNlinesml + << "\nNumber of pixels (multilooked): \t" << ifgNpixelsml + +// this is not correct, only stats per buffer... +// << "\n\n----- Other STATS -----" +// << "\nTotal points in cropped DEM: \t" << numpoints +// << "\nNumber of valid points in DEM: \t" << numvalid +// << " (" << 100*numvalid/numpoints << "%)" +// << "\nNumber of NODATA points in DEM: \t" << numNODATA +// << " (" << 100*numNODATA/numpoints << "%)" +// << "\nMean height in meters at valid points:\t" << meancroppedDEM + << "\n*******************************************************************\n\n"; + scratchlogfile.close(); + + + ofstream scratchresfile("scratchrescomprefdem", ios::out | ios::trunc); + bk_assert(scratchresfile,"comprefdem: scratchrescomprefdem",__FILE__,__LINE__); + scratchresfile + << "\n\n*******************************************************************" + << "\n*_Start_" << processcontrol[pr_i_comprefdem] + << "\n*******************************************************************"; + if (onlyrefphasetopo==true) scratchresfile // [don] + << "\nInclude_flatearth: \tNo"; + else scratchresfile + << "\nInclude_flatearth: \tYes"; + scratchresfile + << "\nDEM source 
file: \t" << refdeminput.firefdem + << "\nMin. of input DEM: \t" << min_input_dem + << "\nMax. of input DEM: \t" << max_input_dem + << "\nData_output_file: \t" << refdeminput.forefdem + << "\nData_output_format: \t" << "real4" + << "\nFirst_line (w.r.t. original_master): \t" + << interferogram.win.linelo + << "\nLast_line (w.r.t. original_master): \t" + << interferogram.win.linehi + << "\nFirst_pixel (w.r.t. original_master): \t" + << interferogram.win.pixlo + << "\nLast_pixel (w.r.t. original_master): \t" + << interferogram.win.pixhi + << "\nMultilookfactor_azimuth_direction: \t" << ifgmlL + << "\nMultilookfactor_range_direction: \t" << ifgmlP + << "\nNumber of lines (multilooked): \t" << ifgNlinesml + << "\nNumber of pixels (multilooked): \t" << ifgNpixelsml + << "\n*******************************************************************" + << "\n* End_" << processcontrol[pr_i_comprefdem] << "_NORMAL" + << "\n*******************************************************************\n"; + scratchresfile.close(); + + + } // END radarcodedem + + +/**************************************************************** + * getcorners * + * * + * Get corners of window (approx) to select DEM in radians (if * + * height were zero) * + * * + * Implementation: * + * 1) calculate phi, lambda of corners * + * 2) select the extreme values * + * 3) add extra overlap * + * 4) determine the indices in the file * + * * + * Freek van Leijen, 07-AUG-2006 * + * * + ****************************************************************/ +void getcorners( + const real8 &l0, // master.currentwindow.linelo + const real8 &lN, // master.currentwindow. + const real8 &p0, // master.currentwindow. + const real8 &pN, // master.currentwindow. 
+ const real8 &extralat, // + const real8 &extralong, // + const real8 &lat0, // lat0file + const real8 &long0, // lon0file + const real8 &DEMdeltalat, // + const real8 &DEMdeltalong, // + const int32 &Nlatpixels, // numberoflatpixels + const int32 &Nlongpixels, // numberoflonpixels + const input_ell &ellips, + const slcimage &master, + orbit &masterorbit, + real8 &phimin, + real8 &phimax, + real8 &lambdamin, + real8 &lambdamax, + int32 &indexphi0DEM, // returned + int32 &indexphiNDEM, // returned + int32 &indexlambda0DEM, // returned + int32 &indexlambdaNDEM) // returned + { + TRACE_FUNCTION("getcorners (FvL 07-AUG-2006)") + + DEBUG << "(getcorners) l0 :" << l0; + DEBUG.print(); + DEBUG << "(getcorners) lN :" << lN; + DEBUG.print(); + DEBUG << "(getcorners) p0 :" << p0; + DEBUG.print(); + DEBUG << "(getcorners) pN :" << pN; + DEBUG.print(); + DEBUG << "(getcorners) extralat :" << extralat; + DEBUG.print(); + DEBUG << "(getcorners) extralong :" << extralong; + DEBUG.print(); + DEBUG << "(getcorners) lat0 :" << lat0; + DEBUG.print(); + DEBUG << "(getcorners) long0 :" << long0; + DEBUG.print(); + DEBUG << "(getcorners) DEMdeltalat :" << DEMdeltalat; + DEBUG.print(); + DEBUG << "(getcorners) DEMdeltalong :" << DEMdeltalong; + DEBUG.print(); + DEBUG << "(getcorners) Total Nlatpixels :" << Nlatpixels; + DEBUG.print(); + DEBUG << "(getcorners) Total Nlongpixels :" << Nlongpixels; + DEBUG.print(); + + real8 phi; + real8 lambda; + real8 height; + lp2ell(l0,p0, + ellips, master, masterorbit, + phi, lambda, height); // returned + real8 phil0p0 = phi; + real8 lambdal0p0 = lambda; + + lp2ell(lN,p0, + ellips, master, masterorbit, + phi, lambda, height); // returned + real8 philNp0 = phi; + real8 lambdalNp0 = lambda; + + lp2ell(lN,pN, + ellips, master, masterorbit, + phi, lambda, height); // returned + real8 philNpN = phi; + real8 lambdalNpN = lambda; + + lp2ell(l0,pN, + ellips, master, masterorbit, + phi, lambda, height); // returned + real8 phil0pN = phi; + real8 lambdal0pN = 
lambda; + + // ______ Select DEM values based on rectangle outside l,p border ______ + phimin = min(min(min(phil0p0,philNp0),philNpN),phil0pN); + phimax = max(max(max(phil0p0,philNp0),philNpN),phil0pN); + lambdamin = min(min(min(lambdal0p0,lambdalNp0),lambdalNpN),lambdal0pN); + lambdamax = max(max(max(lambdal0p0,lambdalNp0),lambdalNpN),lambdal0pN); + + // ______ a little bit extra at edges to be sure ______ + // TODO this should change based on ascending or descending [MA] + phimin -= extralat; + phimax += extralat; + lambdamax += extralong; + lambdamin -= extralong; + + DEBUG << "(getcorners) phimin :" << phimin; + DEBUG.print(); + DEBUG << "(getcorners) phimax :" << phimax; + DEBUG.print(); + DEBUG << "(getcorners) lambdamin :" << lambdamin; + DEBUG.print(); + DEBUG << "(getcorners) lambdamax :" << lambdamax; + DEBUG.print(); + + // ______ Get indices of DEM needed ______ + // ______ Index boundary: [0:numberofx-1] ______ + + indexphi0DEM = int32(floor((lat0-phimax)/DEMdeltalat)); + if (indexphi0DEM < 0) + { + WARNING << "(getcorners) indexphi0DEM: " << indexphi0DEM; WARNING.print(); + indexphi0DEM=0; // default start at first + WARNING.print("DEM does not cover entire interferogram."); + WARNING.print("input DEM should be extended to the North."); + } + indexphiNDEM = int32(ceil((lat0-phimin)/DEMdeltalat)); + if (indexphiNDEM > Nlatpixels-1) + { + WARNING << "(getcorners) indexphiNDEM: " << indexphiNDEM; WARNING.print(); + indexphiNDEM=Nlatpixels-1; + WARNING.print("DEM does not cover entire interferogram."); + WARNING.print("input DEM should be extended to the South."); + } + indexlambda0DEM = int32(floor((lambdamin-long0)/DEMdeltalong)); + if (indexlambda0DEM < 0) + { + WARNING << "(getcorners) indexlambda0DEM: " << indexlambda0DEM; WARNING.print(); + indexlambda0DEM=0; // default start at first + WARNING.print("DEM does not cover entire interferogram."); + WARNING.print("input DEM should be extended to the West."); + } + indexlambdaNDEM = 
int32(ceil((lambdamax-long0)/DEMdeltalong)); + if (indexlambdaNDEM > Nlongpixels-1) + { + WARNING << "(getcorners) indexlambdaNDEM: " << indexlambdaNDEM; WARNING.print(); + indexlambdaNDEM=Nlongpixels-1; + WARNING.print("DEM does not cover entire interferogram."); + WARNING.print("input DEM should be extended to the East."); + } + + DEBUG << "(getcorners) indexphi0DEM :" << indexphi0DEM; + DEBUG.print(); + DEBUG << "(getcorners) indexphiNDEM :" << indexphiNDEM; + DEBUG.print(); + DEBUG << "(getcorners) indexlambda0DEM :" << indexlambda0DEM; + DEBUG.print(); + DEBUG << "(getcorners) indexlambdaNDEM :" << indexlambdaNDEM; + DEBUG.print(); + + + } // END getcorners + + +/**************************************************************** + * griddatalinear (naming after Matlab function) * + * * + * Implementation after GMT function triangulate.c * + ****************************************************************/ + void griddatalinear( + const matrix &x_in, + const matrix &y_in, + const matrix &z_in, + const real8 &x_min, + const real8 &x_max, + const real8 &y_min, + const real8 &y_max, + const int32 &x_inc, + const int32 &y_inc, + const real8 &r_az_ratio, + const real8 &offset, + const real8 &NODATA, + matrix &grd + ) +{ + TRACE_FUNCTION("griddatalinear (LG&FvL 13-AUG-2006)") + + INFO << "griddataLinear interpolation."; + INFO.print(); + + int32 i, j, k, ij, p, i_min, i_max, j_min, j_max; + int32 n, nx, ny, zLoops, zLoop, zBlockSize,indexFirstPoint,zInterpolateBlockSize; + real8 vx[4], vy[4], xkj, xlj, ykj, ylj, zj, zk, zl, zlj, zkj, xp, yp; + real8 f, *a = NULL , *b = NULL , *c = NULL ; // linear interpolation parameters + struct triangulateio In, Out, vorOut; + + + // Initialize variables + + n = zBlockSize = x_in.size(); // block size of x and y coordination + + /* How many groups of z value should be interpolated */ + if (( z_in.size() % zBlockSize ) != 0) + { + INFO << "The input of the DEM buffer and z is not the same..."; + INFO.print(); + return; + } + else + 
zLoops = z_in.size()/x_in.size(); + + a = new real8[zLoops]; + b = new real8[zLoops]; + c = new real8[zLoops]; + if (a == NULL || b == NULL || c == NULL) + { + ERROR << "Memory ERROR in source file: " << __FILE__ + << " at line: " << __LINE__; + PRINT_ERROR(ERROR.get_str()); + throw(memory_error); + } + + nx = grd.lines()/zLoops; + ny = grd.pixels(); + zInterpolateBlockSize = grd.size()/zLoops; + + /* Set everything to 0 and NULL */ + memset ((void *)&In, 0, sizeof (struct triangulateio)); + memset ((void *)&Out, 0, sizeof (struct triangulateio)); + memset ((void *)&vorOut, 0, sizeof (struct triangulateio)); + + /* Allocate memory for input points */ + In.numberofpoints = n ; + In.pointlist = new real8 [2 * n]; + + /* Copy x,y points to In structure array */ + + for (i = j = 0; i < n; i++) + { + In.pointlist[j++] = *(x_in[0] + i); + In.pointlist[j++] = (*(y_in[0] + i)) * r_az_ratio ; + // to eliminate the effect of difference in range and azimuth spacing; + } + + /* Call Jonathan Shewchuk's triangulate algorithm. This is 64-bit safe since + * all the structures use 4-byte ints (longs are used internally). */ + + triangulate ("zIQB", &In, &Out, &vorOut); + //triangulate ("zIBV", &In, &Out, &vorOut); // [MA] for verbosing + + int32 *link = Out.trianglelist; /* List of node numbers to return via link */ + int32 np = Out.numberoftriangles; + + for (k = ij = 0; k < np; k++) + { + DEBUG << "k of np, ij: " << k << " of " << np << ", :" << ij; + DEBUG.print(); + //Store the Index of the first Point of this triangle. 
+ indexFirstPoint = ij; + + vx[0] = vx[3] = *(x_in[0] + link[ij]); vy[0] = vy[3] = *(y_in[0] + link[ij]); ij++; + vx[1] = *(x_in[0] + link[ij]); vy[1] = *(y_in[0]+link[ij]); ij++; + vx[2] = *(x_in[0] + link[ij]); vy[2] = *(y_in[0]+link[ij]); ij++; + + if ( vx[0] == NODATA || vx[1] == NODATA || vx[2] == NODATA ) continue; + if ( vy[0] == NODATA || vy[1] == NODATA || vy[2] == NODATA ) continue; + + /* Compute grid indices the current triangle may cover.*/ + xp = min (min (vx[0], vx[1]), vx[2]); i_min = x_to_i (xp, x_min, x_inc, offset, nx); + //INFO << "xp: " << xp; + //INFO.print(); + xp = max (max (vx[0], vx[1]), vx[2]); i_max = x_to_i (xp, x_min, x_inc, offset, nx); + //INFO << "xp: " << xp; + //INFO.print(); + yp = min (min (vy[0], vy[1]), vy[2]); j_min = y_to_j (yp, y_min, y_inc, offset, ny); + //INFO << "yp: " << yp; + //INFO.print(); + yp = max (max (vy[0], vy[1]), vy[2]); j_max = y_to_j (yp, y_min, y_inc, offset, ny); + //INFO << "yp: " << yp; + //INFO.print(); + /* Adjustments for triangles outside -R region. */ + /* Triangle to the left or right. */ + if ((i_max < 0) || (i_min >= nx)) continue; + /* Triangle Above or below */ + if ((j_max < 0) || (j_min >= ny)) continue; + /* Triangle covers boundary, left or right. */ + if (i_min < 0) i_min = 0; if (i_max >= nx) i_max = nx - 1; + /* Triangle covers boundary, top or bottom. */ + if (j_min < 0) j_min = 0; if (j_max >= ny) j_max = ny - 1; + // for (kk = 0; kk ((x - xt[0]) * ( yt[2] - yt[0])) ? 1:-1; + int iRet1 = ((xt[0] - xt[1]) * (y - yt[1])) > ((x - xt[1]) * ( yt[0] - yt[1])) ? 1:-1; + int iRet2 = ((xt[1] - xt[2]) * (y - yt[2])) > ((x - xt[2]) * ( yt[1] - yt[2])) ? 
1:-1; + + if ((iRet0 >0 && iRet1 > 0 && iRet2 > 0 ) || (iRet0 <0 && iRet1 < 0 && iRet2 < 0 )) + return 1; + else + return 0; + +} //END pointintriangle + diff --git a/doris_core/Makefile b/doris_core/Makefile index fa1307c..ce7b593 100644 --- a/doris_core/Makefile +++ b/doris_core/Makefile @@ -43,8 +43,8 @@ SHELL = /bin/sh ### Specify compiler/installation directory ### -INSTALLDIR = /data/src/Doris_s1_git/bin -CC = g++ +INSTALLDIR = /usr/local/bin +CC = g++-5 SCRIPTSDIR = ../bin ### Define statements controlling compilation ### @@ -93,7 +93,7 @@ CFLAGS = $(CFLAGSOPT) ### Library locations flag ### ### -lcl : used for veclib ### -lm : used for fftw -LFLAGS = -L/lib64 -lfftw3f -lm +LFLAGS = -L/usr/lib/x86_64-linux-gnu -lfftw3f -lm ##################################################### @@ -121,7 +121,7 @@ SWOBJS = $(SWSRCS:.cc=.o) SCRIPTS = helpdoris \ baseline.doris.sh \ baseline.doris.csh \ - construct_dem.sh \ +# construct_dem.sh \ coregpm.doris \ doris* \ heightamb \ diff --git a/doris_core/configure b/doris_core/configure index 0835c10..b18c51d 100755 --- a/doris_core/configure +++ b/doris_core/configure @@ -622,7 +622,7 @@ SWOBJS = \$(SWSRCS:.cc=.o) SCRIPTS = helpdoris \ baseline.doris.sh \ baseline.doris.csh \ - construct_dem.sh \ +# construct_dem.sh \ coregpm.doris \ doris* \ heightamb \ diff --git a/doris_stack/functions/do_deramp_SLC.py b/doris_stack/functions/do_deramp_SLC.py index c63a2e5..00925ff 100755 --- a/doris_stack/functions/do_deramp_SLC.py +++ b/doris_stack/functions/do_deramp_SLC.py @@ -32,7 +32,7 @@ def usage(): #*****************************************************************************# # Calculate chirp for deramping -ChirpFilt = get_ramp(resFilename, resampled=0, type='chirp') +ChirpFilt = get_ramp(resFilename, resampled=0,oversample=0, type='chirp')#updated by anurag to include oversampling res = ResData(resFilename) @@ -42,7 +42,8 @@ def usage(): lN = int(res.processes['oversample']['Last_line (w.r.t. 
ovs_image)']) p0 = int(res.processes['oversample']['First_pixel (w.r.t. ovs_image)']) pN = int(res.processes['oversample']['Last_pixel (w.r.t. ovs_image)']) - dataFormat = 'cpxfloat32' + #dataFormat = 'cpxfloat32' + dataFormat = 'cpxint16' else: # original data l0 = int(res.processes['crop']['First_line (w.r.t. original_image)']) lN = int(res.processes['crop']['Last_line (w.r.t. original_image)']) @@ -53,7 +54,7 @@ def usage(): # Image size Naz_res = lN-l0+1 Nrg_res = pN-p0+1 - +print(Naz_res) ################################################################################ # Read data diff --git a/doris_stack/functions/get_ramp.py b/doris_stack/functions/get_ramp.py index 923a512..fce9bc1 100755 --- a/doris_stack/functions/get_ramp.py +++ b/doris_stack/functions/get_ramp.py @@ -4,7 +4,7 @@ from gdalconst import * import sys -def get_ramp(res_file, resampled=0, type='chirp'): +def get_ramp(res_file, resampled=0, oversample=0, type='chirp'): # Read information ################################################################################ @@ -32,17 +32,22 @@ def get_ramp(res_file, resampled=0, type='chirp'): Trg_start = np.float64(get_parameter('Range_time_to_first_pixel (2way) (ms)', res_file,1))*1e-3 fsRg = np.float64(get_parameter('Range_sampling_rate (computed, MHz)', res_file,1)) - dt_az = np.float64(get_parameter('Azimuth_time_interval (s)', res_file,1)) + dt_az = np.float64(get_parameter('Azimuth_time_interval (s)', res_file,1)) #anurag on 04042020 dt_rg = 1/fsRg/1e6 # Number of lines - lNum = int(get_parameter('Number_of_lines_original', res_file,1)) + lNum = int(get_parameter('Number_of_lines_original', res_file,1)) #anurag on 04042020 if resampled == 1: l0 = int(get_parameter('First_line (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL')) lN = int(get_parameter('Last_line (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL')) p0 = int(get_parameter('First_pixel (w.r.t. 
original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL')) pN = int(get_parameter('Last_pixel (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL')) + elif oversample==1: + l0 = int(get_parameter('First_line (w.r.t. ovs_image)', res_file, 1)) + lN = int(get_parameter('Last_line (w.r.t. ovs_image)', res_file, 1)) + p0 = int(get_parameter('First_pixel (w.r.t. ovs_image)', res_file, 1)) + pN = int(get_parameter('Last_pixel (w.r.t. ovs_image)', res_file, 1)) else: l0 = int(get_parameter('First_line (w.r.t. original_image)', res_file, 1)) lN = int(get_parameter('Last_line (w.r.t. original_image)', res_file, 1)) @@ -67,7 +72,7 @@ def get_ramp(res_file, resampled=0, type='chirp'): os.remove(Link_DATA) RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_pixel) - print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH + print("RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH) os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA) outStream = open(Path_MFF_HDR,'w') @@ -99,7 +104,7 @@ def get_ramp(res_file, resampled=0, type='chirp'): RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_line) - print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH + print("RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH) os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA) outStream = open(Path_MFF_HDR,'w') @@ -137,7 +142,7 @@ def get_ramp(res_file, resampled=0, type='chirp'): TazGrid = np.tile(Tvect_az, (1, Nrg_res)) else: - print 'variable resampled can only be 0 or 1!' 
+ print('variable resampled can only be 0 or 1!') return #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -200,7 +205,7 @@ def get_ramp(res_file, resampled=0, type='chirp'): elif type == 'DC': data = Df_AzCtr + Taz_vec * DR_est else: - print 'Choose either chirp or DC for type' + print('Choose either chirp or DC for type') return return data @@ -260,7 +265,7 @@ def freadbk(path_file,line_start=1, pixels_start=1,nofLines1=None,nofPixels1=Non gdal.AllRegister() thisBurstData_file=gdal.Open(path_file,GA_ReadOnly) if thisBurstData_file is None: - print 'Could not open'+Path_MFF_HDR + print('Could not open'+Path_MFF_HDR) sys.exit(1) #print 'Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \ # thisBurstData_file.GetDriver().LongName @@ -269,14 +274,14 @@ def freadbk(path_file,line_start=1, pixels_start=1,nofLines1=None,nofPixels1=Non #print 'Projection is ',thisBurstData_file.GetProjection() geotransform = thisBurstData_file.GetGeoTransform() if not geotransform is None: - print 'Origin = (',geotransform[0], ',',geotransform[3],')' - print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')' + print('Origin = (',geotransform[0], ',',geotransform[3],')') + print('Pixel Size = (',geotransform[1], ',',geotransform[5],')') cint_srd=thisBurstData_file.GetRasterBand(1) #print 'Band Type=',gdal.GetDataTypeName(cint_srd.DataType) if cint_srd.GetOverviewCount() > 0: - print 'Band has ', cint_srd.GetOverviewCount(), ' overviews.' 
+ print('Band has ', cint_srd.GetOverviewCount(), ' overviews.') thisBurstData= cint_srd.ReadAsArray(int(pixels_start-1),int(line_start-1),nofPixels1,nofLines1) return thisBurstData ################################################################################## diff --git a/doris_stack/functions/job.started b/doris_stack/functions/job.started new file mode 100644 index 0000000..e69de29 diff --git a/doris_stack/functions/load_shape_unzip.py b/doris_stack/functions/load_shape_unzip.py index e192d56..832d791 100755 --- a/doris_stack/functions/load_shape_unzip.py +++ b/doris_stack/functions/load_shape_unzip.py @@ -68,7 +68,7 @@ def unzip_folder(zipped_folder, dest_folder, shapefile='', pol='', data=True, sw def extract_kml_preview(zipped_folder, dir='', kml=True, png=True, overwrite=False): # Extracts quicklook and/or .kml files. - + #print(zipped_folder) zipdat = zipfile.ZipFile(zipped_folder) if not dir: dir = os.path.dirname(zipped_folder) @@ -170,7 +170,7 @@ def load_shape(shapefile, buffer=0.02): if isinstance(shapefile, list): # If the coordinates are already loaded. (for example bounding box) shp = Polygon(shapefile) else: # It should be a shape file. We always select the first shape. - sh = fiona.open(shapefile).next() + sh = next(iter(fiona.open(shapefile)))#fiona.open(shapefile).next() shp = shape(sh['geometry']) # Now we have the shape we add a buffer and simplify first to save computation time. 
diff --git a/doris_stack/functions/precise_read.py b/doris_stack/functions/precise_read.py index bcb8ffe..e7dea32 100755 --- a/doris_stack/functions/precise_read.py +++ b/doris_stack/functions/precise_read.py @@ -21,7 +21,7 @@ def orbit_read(input_EOF_FileName): #import xml.etree.ElementTree as etree print 'Failed to load lxml.etree or xml.etree.cElementTree' sys.exit(1) - + #print(input_EOF_FileName) inTree = etree.parse(input_EOF_FileName) queryList = { # orbit inf @@ -152,4 +152,4 @@ def hms2sec(hmsString,convertFlag='float'): elif convertFlag == 'float' : return float(secString) else: - return int(secString) \ No newline at end of file + return int(secString) diff --git a/doris_stack/main_code/doris_main.py b/doris_stack/main_code/doris_main.py index 60e3c10..f285144 100644 --- a/doris_stack/main_code/doris_main.py +++ b/doris_stack/main_code/doris_main.py @@ -1,6 +1,7 @@ import argparse import os import xml.etree.ElementTree as ET +import sys from doris.doris_stack.main_code.doris_sentinel_1 import DorisSentinel1 """Doris processing diff --git a/doris_stack/main_code/doris_parameters.py b/doris_stack/main_code/doris_parameters.py index 266d7aa..6cbf19e 100644 --- a/doris_stack/main_code/doris_parameters.py +++ b/doris_stack/main_code/doris_parameters.py @@ -73,6 +73,7 @@ def __init__(self, stack_path): self.do_calc_coordinates = self._settings_compare('.do_calc_coordinates', 'yes') self.do_multilooking = self._settings_compare('.do_multilooking', 'yes') self.do_unwrap = self._settings_compare('.do_unwrap', 'yes') + # # used in Jobs # diff --git a/doris_stack/main_code/doris_sentinel_1.py b/doris_stack/main_code/doris_sentinel_1.py index a94a438..46e76c8 100644 --- a/doris_stack/main_code/doris_sentinel_1.py +++ b/doris_stack/main_code/doris_sentinel_1.py @@ -46,7 +46,7 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): profile.log_time_stamp('start') # Create a datastack using the stack function stack = StackData(track_dir=track_dir, 
shape_dat=shape_dat, start_date=start_date, end_date=end_date, - polarisation=polarisation, path=stack_path, db_type=2, precise_dir=precise_orbits) + polarisation=polarisation, path=stack_path, db_type=2, precise_dir=precise_orbits, buffer=0.01) #profile.log_time_stamp('StackData') # Select the images which are new in this datastack. stack.select_image() @@ -74,7 +74,8 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): profile.log_time_stamp('stack preparation finished') # Finally delete unzipped images stack.del_unpacked_image() - + + import single_master_stack # Now we import the script to create a single master interferogram @@ -86,16 +87,25 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): # Copy the necessary files to start processing profile.log_time_stamp('initialize') processing.initialize() - + + + ## The following 3 lines have been added by Anurag on Monday2-10-02-2020 for testing oversampling using doris stack + ##updated on 21-03-2020: moved from after network esd to immediately after initialize + #if (dorisParameters.do_oversample): + #profile.log_time_stamp('oversample') + #processing.oversample(master=True) + # Calculate the coarse orbits of individual bursts if(dorisParameters.do_coarse_orbits): profile.log_time_stamp('coarse_orbits') processing.coarse_orbits() # Deramp the data of both slave and master + if(dorisParameters.do_deramp): profile.log_time_stamp('deramp') processing.deramp(master=True) # Still needed for coherence... 
# Fake the use of fine window coregistration, which is officially not needed + #sys.exit() if(dorisParameters.do_fake_fine_coreg_bursts): profile.log_time_stamp('fake_fine_coreg_bursts') processing.fake_fine_coreg() @@ -107,14 +117,19 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): if(dorisParameters.do_fake_coreg_bursts): profile.log_time_stamp('fake_coreg_bursts') processing.fake_coregmp() - # Resample individual bursts + + # Resample individual bursts if(dorisParameters.do_resample): profile.log_time_stamp('resample') - processing.resample() + processing.resample(concatenate=True, ras=True, overwrite=True) + # Reramp burst if(dorisParameters.do_reramp): profile.log_time_stamp('reramp') processing.reramp() + + #processing.combine_slave(overwrite=False, ramped=False, deramped=True, ras=True) + #processing.combine_master(overwrite=False, ramped=False, deramped=True, ras=True) # Perform enhanced spectral diversity for full swath if(dorisParameters.do_esd): @@ -123,11 +138,11 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): if (dorisParameters.do_network_esd): profile.log_time_stamp('network esd') processing.network_esd() - + # Make interferograms for individual bursts if(dorisParameters.do_interferogram): profile.log_time_stamp('interferogram') - processing.interferogram() + processing.interferogram(concatenate=False, ras=False) # Calculate earth reference phase from interferograms and combine for full swath if(dorisParameters.do_compref_phase): profile.log_time_stamp('compref_phase') @@ -143,12 +158,11 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): # Remove height effects from interferograms and combine for full swath if(dorisParameters.do_ref_dem): profile.log_time_stamp('ref_dem') - processing.ref_dem(concatenate=True, ras=True) + processing.ref_dem(concatenate=True, ras=True, overwrite=True) # Geocode data if(dorisParameters.do_calc_coordinates): 
profile.log_time_stamp('calc_coordinates') processing.calc_coordinates() - # Correct using ramp ifgs based on ESD if(dorisParameters.do_ESD_correct): profile.log_time_stamp('ESD_correct') @@ -156,13 +170,11 @@ def run(self, doris_parameters_path, start_date, end_date, master_date): # Compute coherence if(dorisParameters.do_coherence): profile.log_time_stamp('coherence') - processing.coherence(ras=True) - + processing.coherence(ras=True, overwrite=True) if(dorisParameters.do_phasefilt): profile.log_time_stamp('phasefilt') processing.phasefilt(ras=True) # Multilook filtered image and coherence image - if(dorisParameters.do_multilooking): profile.log_time_stamp('multilooking') processing.multilook(step='coherence') diff --git a/doris_stack/main_code/dorisparameters.py b/doris_stack/main_code/dorisparameters.py index ba8cadc..0429a64 100644 --- a/doris_stack/main_code/dorisparameters.py +++ b/doris_stack/main_code/dorisparameters.py @@ -80,6 +80,8 @@ def __init__(self, stack_path): self.do_calc_coordinates = self._settings_compare('.do_calc_coordinates', 'yes') self.do_multilooking = self._settings_compare('.do_multilooking', 'yes') self.do_unwrap = self._settings_compare('.do_unwrap', 'yes') + + #self.do_oversample = self._settings_compare('.do_oversample', 'yes') # # used in Jobs # diff --git a/doris_stack/main_code/jobs.py b/doris_stack/main_code/jobs.py index 828fab9..bdb74ef 100644 --- a/doris_stack/main_code/jobs.py +++ b/doris_stack/main_code/jobs.py @@ -109,11 +109,11 @@ def run(self, job_list): self._start_jobs() while len(self.jobs_active): if(self.verbose): - print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs busy" + print(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs busy") time.sleep(self.between_sleep_time) self._check_active_jobs() self._start_jobs() if (self.verbose): - print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + "jobs finished" + print(time.strftime("%a, %d %b %Y %H:%M:%S +0000", 
time.gmtime()) + "jobs finished") time.sleep(self.end_sleep_time) self._cleanup_flag_dir() diff --git a/doris_stack/main_code/resdata.py b/doris_stack/main_code/resdata.py index cd28c96..ebc0648 100644 --- a/doris_stack/main_code/resdata.py +++ b/doris_stack/main_code/resdata.py @@ -76,7 +76,7 @@ def meta_reader(self): temp[name] = [line] except: - print 'Error occurred at line: ' + line + print('Error occurred at line: ' + line) def process_reader(self,processes = ''): # This function reads random processes based on standard buildup of processes in res files. @@ -84,7 +84,7 @@ def process_reader(self,processes = ''): # If loc is true, it will only return the locations where different processes start. if not processes: - processes = self.process_control.keys() + processes = list(self.process_control.keys()) processes.append('leader_datapoints') process = '' @@ -171,10 +171,9 @@ def process_reader(self,processes = ''): row += 1 except: - print 'Error occurred at line: ' + line + print('Error occurred at line: ' + line) def process_spacing(self,process=''): - spacing = 0 table_spacing = [0,0,0,0,0,0,0] @@ -283,10 +282,10 @@ def write(self,new_filename=''): elif process == 'coarse_orbits': # the coarse orbits output is different from the others. if 'Control point' in line_key: # Special case coarse orbits... 
f.write((line_key + ' =').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n') - elif not isinstance(data[line_key], basestring): # Another special case + elif not isinstance(data[line_key], str): # Another special case f.write(line_key.ljust(spacing_row[0]) + (data[line_key][0]).ljust(spacing_row[1]) + data[line_key][1].ljust(spacing_row[2]) + ' '.join(data[line_key][2:]) + '\n') - elif isinstance(data[line_key], basestring): # Handle as in normal cases + elif isinstance(data[line_key], str): # Handle as in normal cases f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n') else: # If it consists out of two parts f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n') @@ -301,7 +300,7 @@ def write(self,new_filename=''): def insert(self,data,process,variable=''): # This function inserts a variable or a process which does not exist at the moment - processes = self.process_control.keys() + processes = list(self.process_control.keys()) processes.extend(['header','leader_datapoints']) if process not in processes: @@ -401,4 +400,4 @@ def request(self,process,variable=''): warnings.warn('This variable does not exist: ' + str(variable)) return - return data \ No newline at end of file + return data diff --git a/doris_stack/main_code/single_master_stack.py b/doris_stack/main_code/single_master_stack.py index 11f49bc..d579711 100644 --- a/doris_stack/main_code/single_master_stack.py +++ b/doris_stack/main_code/single_master_stack.py @@ -1,4 +1,4 @@ -import os +import os, sys import numpy as np from datetime import datetime from collections import OrderedDict @@ -77,7 +77,6 @@ def processing_read(self): folders = next(os.walk(self.folder))[1] folders = [fold for fold in folders if len(fold) == 8] - self.stack = dict() for fold in folders: @@ -97,7 +96,7 @@ def processing_read(self): for burst in bursts: burst_name = swath + '_' + burst self.stack[s_date][burst_name] = dict() - + 
self.read_res() def remove_finished(self, step='unwrap'): @@ -152,7 +151,7 @@ def initialize(self, path='',cascade=False): # Copy data files m_source = self.burst_path(key=burst, date=self.master_date, dat_type='slave', full_path=True) m_dest = self.burst_path(key=burst, date=date, dat_type='master', full_path=True) - + #print(m_source, m_dest) #Added and commented by Anurag on 1st Nov and 12th Nov. respectively if not os.path.exists(m_dest): os.symlink(m_source, m_dest) @@ -381,6 +380,76 @@ def correct_coarse_correlation(self): self.fake_master_steps(step='coarse_correl', burst_proc=False) + def oversample(self, master=True): + # Oversample slave and masters and slaves of bursts. + + if len(self.coreg_dates) == 0: + return + self.read_res(dates=self.coreg_dates) + + job_list1 = [] + job_list2 = [] + + # Oversample slaves + bursts = self.stack[self.coreg_dates[0]].keys() + for date in self.coreg_dates: + for burst in bursts: + path = self.burst_path(date, burst, full_path=True) + #slave_file = self.burst_path(key=burst, dat_type='slave', full_path=False) + #slave_deramped = self.burst_path(key=burst, dat_type='slave_deramped', full_path=False) + #print(self.stack) + if self.stack[date][burst]['slave'].process_control['oversample'] != '1': + path = self.burst_path(date, burst, full_path=True) + command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.oversample') + + job_list2.append({"path": path, "command": command1}) + + if not self.parallel: + os.chdir(path) + # Resample + os.system(command1) + + #Oversample master + date = self.master_date + + for burst in bursts: + path = self.burst_path(date, burst, full_path=True) + #master_file = self.burst_path(key=burst, dat_type='slave', full_path=False) + master_ovs = self.burst_path(key=burst, dat_type='slave_ovs', full_path=False) + #if self.stack[date][burst]['slave'].process_control['oversample'] != '1': + if not os.path.exists(os.path.join(path, master_ovs)) or not master: + path = 
self.burst_path(date, burst, full_path=True) + command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.oversample') + + job_list1.append({"path": path, "command": command1}) + + if not self.parallel: + os.chdir(path) + # Resample + os.system(command1) + + if self.parallel: + jobs = Jobs(self.nr_of_jobs, self.doris_parameters) + jobs.run(job_list1) + jobs.run(job_list2) + + + # Create links for master if needed. + for burst in bursts: + master_file = self.burst_path(key=burst, date=date, dat_type='slave_ovs', full_path=True) + + for date_slave in self.coreg_dates: + slave_file = self.burst_path(key=burst, date=date_slave, dat_type='master_ovs', full_path=True) + #print(slave_file) + if not os.path.exists(slave_file): + os.symlink(master_file, slave_file) + + + #self.update_res(dates=self.coreg_dates) + + + + def deramp(self, master=True): # Deramp slave and masters and slaves of bursts. @@ -398,7 +467,9 @@ def deramp(self, master=True): for burst in bursts: path = self.burst_path(date, burst, full_path=True) slave_file = self.burst_path(key=burst, dat_type='slave', full_path=False) + #slave_file = self.burst_path(key=burst, dat_type='slave_ovs', full_path=False) slave_deramped = self.burst_path(key=burst, dat_type='slave_deramped', full_path=False) + #slave_deramped = self.burst_path(key=burst, dat_type='slave_ovs_deramped', full_path=False) if not os.path.exists(os.path.join(path, slave_deramped)): command2 = 'python ' + os.path.join(self.function_path, 'do_deramp_SLC.py') + ' ' + slave_file + ' slave.res' @@ -409,6 +480,8 @@ def deramp(self, master=True): if self.stack[date][burst]['slave'].processes['crop']['Data_output_file'] != os.path.basename(slave_deramped): self.stack[date][burst]['slave'].processes['crop']['Data_output_file'] = os.path.basename(slave_deramped) + #if self.stack[date][burst]['slave'].processes['oversample']['Data_output_file'] != os.path.basename(slave_deramped): + 
#self.stack[date][burst]['slave'].processes['oversample']['Data_output_file'] = os.path.basename(slave_deramped) if self.stack[date][burst]['slave'].processes['readfiles']['deramp'] != '1': self.stack[date][burst]['slave'].processes['readfiles']['deramp'] = '1' if self.stack[date][burst]['master'].processes['readfiles']['reramp'] != '0': @@ -420,7 +493,9 @@ def deramp(self, master=True): for burst in bursts: path = self.burst_path(date, burst, full_path=True) master_file = self.burst_path(key=burst, dat_type='slave', full_path=False) + #master_file = self.burst_path(key=burst, dat_type='slave_ovs', full_path=False) master_deramped = self.burst_path(key=burst, dat_type='slave_deramped', full_path=False) + #master_deramped = self.burst_path(key=burst, dat_type='slave_ovs_deramped', full_path=False) if not os.path.exists(os.path.join(path, master_deramped)) or not master: command1 = 'python ' + os.path.join(self.function_path, 'do_deramp_SLC.py') + ' ' + master_file + ' slave.res' @@ -437,9 +512,11 @@ def deramp(self, master=True): # Create links for master if needed. 
for burst in bursts: master_file = self.burst_path(key=burst, date=date, dat_type='slave_deramped', full_path=True) + #master_file = self.burst_path(key=burst, date=date, dat_type='slave_ovs_deramped', full_path=True) for date_slave in self.coreg_dates: slave_file = self.burst_path(key=burst, date=date_slave, dat_type='master_deramped', full_path=True) + #slave_file = self.burst_path(key=burst, date=date_slave, dat_type='master_ovs_deramped', full_path=True) if not os.path.exists(slave_file): os.symlink(master_file, slave_file) if self.stack[date_slave][burst]['master'].processes['crop']['Data_output_file'] != os.path.basename(slave_file): @@ -853,7 +930,7 @@ def dac_full_swath(self): self.fake_master_steps(step='dem_assist', burst_proc=False) - def resample(self, type=''): + def resample(self, type='', concatenate=False, ras=False, overwrite=False): # Resample slave bursts if len(self.coreg_dates) == 0: @@ -881,7 +958,56 @@ def resample(self, type=''): jobs.run(jobList1) jobs.run(jobList2) + + #Added by Anurag to concatenate slave_rsmp.raw files from multiple bursts + self.read_res(dates=self.coreg_dates) + + if concatenate == True: + rsmp_name = 'slave_rsmp.raw' + self.concatenate(rsmp_name, rsmp_name, dt=np.dtype('complex64'), overwrite=overwrite, master=False) + + for date in self.coreg_dates: + if self.full_swath[date]['slave'].process_control['resample'] != '1' or overwrite is True: + # Add res file information + no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original'] + no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original'] + line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)'] + line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)'] + pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. 
output_image)'] + pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)'] + + burst = self.stack[date].keys()[0] + print(self.stack[date][burst]) + res = copy.deepcopy(self.stack[date][burst]['slave'].processes['resample']) + res['First_line (w.r.t. original_master)'] = line_0 + res['Last_line (w.r.t. original_master)'] = line_1 + res['First_pixel (w.r.t. original_master)'] = pix_0 + res['Last_pixel (w.r.t. original_master)'] = pix_1 + res['Number of lines (multilooked)'] = no_lines + res['Number of pixels (multilooked)'] = no_pixels + + self.full_swath[date]['slave'].insert(res, 'resample') + + path = self.image_path(date) + os.chdir(path) + # Finally show preview based on cpxfiddle + + if ras: + pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original'] + + if not os.path.exists('slave_rs_mag.ras') or overwrite: + mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 1/1 -f cr4 -l1 ' \ + '-p1 -P' + pixels + ' ' + rsmp_name + ' > slave_rs_mag.ras' + os.system(self.cpxfiddle + mag) + + + self.update_res(dates=self.coreg_dates) + + #self.fake_master_steps(step='resample', network=False, full_swath=concatenate) + + + def reramp(self, type=''): # This function reramps the radar data. If master is True, we assume that there is still an original master file # Which means that it is not needed to reramp that one. If master is false, only the slave is reramped. @@ -928,6 +1054,9 @@ def reramp(self, type=''): self.update_res(dates=self.coreg_dates) self.fake_master_steps(step='resample') + + + def fake_master_resample(self): # This script fakes a resample step for the master file (this is of course not really needed) @@ -967,8 +1096,12 @@ def fake_master_resample(self): lines = int(burst_res[date][burst]['slave'].processes['readfiles']['Last_line (w.r.t. output_image)']) - \ int(burst_res[date][burst]['slave'].processes['readfiles']['First_line (w.r.t. 
output_image)']) + 1 pixels = int(burst_res[date][burst]['slave'].processes['readfiles']['Last_pixel (w.r.t. output_image)']) - \ - int(burst_res[date][burst]['slave'].processes['readfiles']['First_pixel (w.r.t. output_image)']) + 1 + int(burst_res[date][burst]['slave'].processes['readfiles']['First_pixel (w.r.t. output_image)']) + 1 dtype = np.dtype([('re', np.int16), ('im', np.int16)]) + + print('resample_dat '+ resample_dat) + print('slave_deramped_dat '+ slave_deramped_dat) + if not os.path.exists(resample_dat): resample = np.memmap(resample_dat, dtype='complex64', mode='w+', shape=(lines, pixels)) @@ -983,7 +1116,12 @@ def fake_master_resample(self): np.int16).astype(np.float32).view(np.complex64) resample_reramped[:, :] = slc_ramped_dat resample_reramped.flush() - + + + rsmp_name = 'slave_rsmp.raw' + self.concatenate(rsmp_name, rsmp_name, dt=np.dtype('complex64'), overwrite=True, master=True) + + self.update_res(dates=[date], image_stack=image_res, burst_stack=burst_res) def fake_master_steps(self, step='subtr_refdem', network=True, burst_proc=True, full_swath=True): @@ -998,8 +1136,8 @@ def fake_master_steps(self, step='subtr_refdem', network=True, burst_proc=True, if not step in steps: print('Step ' + step + ' does not exist in processing') - return - elif step == 'resample': + #return + if step == 'resample': self.fake_master_resample() return @@ -1604,6 +1742,7 @@ def compref_dem(self, network=False): for date in self.coreg_dates: job_list1 = [] for burst in self.stack[date].keys(): + print(date, burst) if self.stack[date][burst]['ifgs'].process_control['comp_refdem'] != '1': path = self.burst_path(date, burst, full_path=True) command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.comprefdem') @@ -1612,6 +1751,7 @@ def compref_dem(self, network=False): os.chdir(path) os.system(command1) if (self.parallel): + #print(command1) jobs = Jobs(self.nr_of_jobs, self.doris_parameters) jobs.run(job_list1) @@ -1651,6 +1791,7 @@ def 
ref_dem(self,concatenate=True, overwrite=False, network=False, ras=False): if concatenate == True: self.concatenate('cint_srd.raw', 'cint_srd.raw', dt=np.dtype('complex64'), overwrite=overwrite) + self.concatenate('h2ph_srd.raw', 'h2ph_srd.raw', dt=np.dtype('float32'), overwrite=overwrite)#dt changed from cpx64 to float32 on 10-05-2021. for date in self.coreg_dates: @@ -1692,15 +1833,15 @@ def ref_dem(self,concatenate=True, overwrite=False, network=False, ras=False): pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original'] if not os.path.exists('interferogram_srd_mag.ras') or overwrite: - mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \ + mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 8/2 -f cr4 -l1 ' \ '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_mag.ras' os.system(self.cpxfiddle + mag) if not os.path.exists('interferogram_srd_mix.ras') or overwrite: - mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \ + mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 8/2 -f cr4 -l1 ' \ '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_mix.ras' os.system(self.cpxfiddle + mix) if not os.path.exists('interferogram_srd_pha.ras') or overwrite: - pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \ + pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 8/2 -f cr4 -l1 ' \ '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_pha.ras' os.system(self.cpxfiddle + pha) @@ -2015,16 +2156,21 @@ def calc_coordinates(self, createdem=True): if not os.path.exists(link_dem): os.symlink(dat_dem, link_dem) - def concatenate(self, burst_file, master_file, dt=np.dtype(np.float32), overwrite=False, dates=[], multilooked='False', res_type='master'): + def concatenate(self, burst_file, master_file, dt=np.dtype(np.float32), overwrite=False, dates=[], multilooked='False', 
res_type='master', master=False): # Concatenate all burst to a single full swath product. If burst_file = 'master' then the input master files are read... # This function also accepts cpxint16 datatype + # dt: 28-04-2021: master kwarg added to concatenate the master images, if needed if not dates: dates = self.stack.keys() + if master: + dates.append(self.master_date) job_list1 = [] for date in dates: path = self.image_path(date) + #print(path) + #continue final_path = os.path.join(path, master_file) if not os.path.exists(final_path) or overwrite == True: @@ -2035,6 +2181,7 @@ def concatenate(self, burst_file, master_file, dt=np.dtype(np.float32), overwrit if not self.parallel: os.chdir(path) os.system(command1) + if self.parallel: jobs = Jobs(self.nr_of_jobs, self.doris_parameters) jobs.run(job_list1) @@ -2152,6 +2299,12 @@ def burst_path(self, date='', key='', file_path='', stack_folder=False, dat_type file_path = dat_type + file_path + '.raw' elif dat_type == 'master_deramped' or dat_type == 'slave_deramped': file_path = dat_type[:-9] + file_path + '_deramped.raw' + elif dat_type == 'master_ovs' or dat_type == 'slave_ovs': + #file_path = dat_type[:-4] + file_path + '_ovs.raw' + file_path = dat_type + '.raw' + elif dat_type == 'master_ovs_deramped' or dat_type == 'slave_ovs_deramped': + #file_path = dat_type[:-4] + file_path + '_ovs.raw' + file_path = dat_type + '.raw' if full_path is True: if len(date) == 10: diff --git a/doris_stack/main_code/stack.py b/doris_stack/main_code/stack.py index f7a2101..8d447ca 100644 --- a/doris_stack/main_code/stack.py +++ b/doris_stack/main_code/stack.py @@ -140,7 +140,7 @@ def search_files(self, track_dir): images = list() top_dir = next(os.walk(track_dir)) - + for data in top_dir[2]: if data.endswith('.zip'): images.append(os.path.join(track_dir, data)) diff --git a/doris_stack/main_code/swath.py b/doris_stack/main_code/swath.py index 6db92e3..ccc35d0 100644 --- a/doris_stack/main_code/swath.py +++ 
b/doris_stack/main_code/swath.py @@ -7,6 +7,10 @@ from doris.doris_stack.main_code.burst import BurstMeta from doris.doris_stack.functions.swath_metadata import burst_coverage, swath_coverage, swath_precise +#from ..functions.xml_query import xml_query +#from burst import BurstMeta +#from ..functions.swath_metadata import burst_coverage, swath_coverage, swath_precise + class SwathMeta(object): # Class which stores and gathers information of a swath in a sentinel dataset diff --git a/envisat_tools/Makefile b/envisat_tools/Makefile index 9126dff..089c66f 100755 --- a/envisat_tools/Makefile +++ b/envisat_tools/Makefile @@ -17,12 +17,12 @@ ### Check if you agree with the options below (install_dir OK?) ### then simply type: "make install" SHELL = /bin/sh -CC = gcc +CC = g++ #CFLAGS = -O3 #CFLAGS = -m32 # for 64-bit systems, it requires compatibility lib32, no need for epr_api v2.2 LFLAGS = -lm -INSTALL_DIR = /home/gertmulder/bin/doris/doris_v5_wu_branch - +# INSTALL_DIR = /home/gertmulder/bin/doris/doris_v5_wu_branch +INSTALL_DIR = /usr/local/bin ############################################################# diff --git a/install/INSTALL.sh b/install/INSTALL.sh new file mode 100644 index 0000000..4926375 --- /dev/null +++ b/install/INSTALL.sh @@ -0,0 +1,58 @@ +cd ../doris_core +sudo apt-get install tcsh +./configure +make +make install + +# -------------------------------------------- +# - COMPILATION OF THE DORIS CORE ------------ +# -------------------------------------------- +cd ../doris_core +# 8. Read the README file +./configure #(creates "Makefile") # requires tcsh shell to run, to install type "" at shell prompt sudo apt-get install + #tcsh on Ubuntu platform. + #( +answer the questions about libraries, etc.) +make # (compiles the software) +make install # (installs doris and bin scripts) + + +#-------------------------------------------- +# - COMPILATION OF DORIS UTILITIES ----------- +# -------------------------------------------- +cd ../sartools +make +# 14. 
Review/edit the Makefile if this does not work +# (for example if you do not want to use GNU gcc/g++ as compiler) +make install # (installs in /usr/local/bin unless you edit the Makefile) + + +cd ../envisat_tools # on 64-bit system requires libc-dev-i386 library ex: "sudo apt-get install libc-dev-i386" +make +# 18. Review/edit the Makefile if this does not work +# (for example if you do not want to use gcc as compiler) +make install + +# -------------------------------------------- +# - INSTALLATION OF Doris - python part +# -------------------------------------------- + +# To use the Doris Python scripts you will need to install the following Python packages: +# - numpy, scipy (for calculations) +# - matplotlib (visualization) +# - requests (data download) +# - gdal, gdal-dev, shapely, fiona, pyproj, fastkml, osr (GIS) + + +# After installation of the python packages you have to make a link to the doris_core, cpxfiddle and snaphu executables. +# You can do so by executing the install script, but you will have to create user accounts first to be able to download +# Sentinel-1 and SRTM DEMs. + +# 25. create an account for the downloading of SRTM DEMs at +# https://urs.earthdata.nasa.gov/users/new +# 26. create an account for downloading Sentinel data at +# https://urs.earthdata.nasa.gov/users/new/ + +# move to the install directory +cd ../install +python init_cfg.py +# and fill in the different paths and user accounts diff --git a/install/INSTALL.txt b/install/INSTALL.txt index 7a4797b..dde1fae 100755 --- a/install/INSTALL.txt +++ b/install/INSTALL.txt @@ -35,8 +35,8 @@ There are 5 components in this distribution (doris v5.0beta). -------------------------------------------- 7. cd ../doris_core 8. Read the README file -9. ./configure (creates "Makefile") # requires tcsh shell to run, to install type "sudo apt-get install - tcsh" at shell prompt on Ubuntu platform. +9. 
./configure (creates "Makefile") # requires tcsh shell to run, to install type "" at shell prompt sudo apt-get install + tcshon Ubuntu platform. ( +answer the questions about libraries, etc.) 10. make (compiles the software) 11. make install (installs doris and bin scripts) diff --git a/install/doris_config.xml b/install/doris_config.xml new file mode 100644 index 0000000..6392eec --- /dev/null +++ b/install/doris_config.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/prepare_stack/.create_dem.py.swo b/prepare_stack/.create_dem.py.swo new file mode 100644 index 0000000..a545ef6 Binary files /dev/null and b/prepare_stack/.create_dem.py.swo differ diff --git a/prepare_stack/.create_dem.py.swp b/prepare_stack/.create_dem.py.swp new file mode 100644 index 0000000..2615618 Binary files /dev/null and b/prepare_stack/.create_dem.py.swp differ diff --git a/prepare_stack/create_datastack_bash.py b/prepare_stack/create_datastack_bash.py index 52d58f9..3113004 100644 --- a/prepare_stack/create_datastack_bash.py +++ b/prepare_stack/create_datastack_bash.py @@ -18,7 +18,6 @@ def create(self, stack_folder, root_folder, nodes): settings = tree.getroot() source_path = settings.find('.source_path').text - python_path = os.path.dirname(source_path) doris_folder = os.path.dirname(settings.find('.doris_path').text) cpxfiddle_folder = os.path.dirname(settings.find('.cpxfiddle_path').text) snaphu_folder = os.path.dirname(settings.find('.snaphu_path').text) @@ -34,7 +33,7 @@ def create(self, stack_folder, root_folder, nodes): f.write('\n') f.write('#PBS -l nodes=1:ppn=' + nodes + ' \n') f.write('\n') - f.write('source_path=' + python_path + '\n') + f.write('source_path=' + source_path + '\n') f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n') f.write('export PATH=' + doris_folder + ':' + cpxfiddle_folder + ':' + snaphu_folder + ':' + '$PATH \n') f.write('python ' + doris_run_script + ' -p ' + processing + ' \n') @@ -42,7 +41,7 @@ def create(self, stack_folder, root_folder, 
nodes): f.close() # make sure the file is executable - os.chmod(file_path, 0744) + os.chmod(file_path, 0o744) # Also create a download and dem creation bash script. file_path = os.path.join(stack_folder, 'create_dem.sh') @@ -53,27 +52,26 @@ def create(self, stack_folder, root_folder, nodes): f.write('#!/bin/bash \n') f.write('\n') - f.write('source_path=' + python_path + '\n') + f.write('source_path=' + source_path + '\n') f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n') f.write('python ' + doris_run_script + ' ' + processing + ' SRTM3 \n') f.close() # make sure the file is executable - os.chmod(file_path, 0744) + os.chmod(file_path, 0o744) file_path = os.path.join(stack_folder, 'download_sentinel.sh') f = open(file_path, 'w') - - f.write('#!/bin/bash \n') - f.write('\n') - - f.write('source_path=' + python_path + '\n') + f.write('source_path=' + source_path + '\n') f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n') doris_run_script = os.path.join(source_path, 'prepare_stack', 'download_sentinel_data_orbits.py') processing = stack_folder + f.write('#!/bin/bash \n') + f.write('\n') + f.write('python ' + doris_run_script + ' ' + processing + ' \n') f.close() # make sure the file is executable - os.chmod(file_path, 0744) \ No newline at end of file + os.chmod(file_path, 0o744) diff --git a/prepare_stack/create_dem.py b/prepare_stack/create_dem.py index f8334d2..69161a3 100644 --- a/prepare_stack/create_dem.py +++ b/prepare_stack/create_dem.py @@ -13,10 +13,10 @@ # Description srtm q data: https://lpdaac.usgs.gov/node/505 import numpy as np -import gdal -import gdalconst -import osr -from HTMLParser import HTMLParser +from osgeo import gdal +from osgeo import gdalconst +from osgeo import osr +from html.parser import HTMLParser import pickle import requests import os @@ -68,16 +68,17 @@ def create(self, shape_filename='', out_file='' ,var_file='', resample='regular_ except: print('Not possible to create DEM grid.') return - + # Now add the rounding and 
borders to add the sides of our image # Please use rounding as a n/60 part of a degree (so 1/15 , 1/10 or 1/20 of a degree for example..) latlim = [np.floor((latlim[0] - border) / rounding) * rounding, np.ceil((latlim[1] + border) / rounding) * rounding] lonlim = [np.floor((lonlim[0] - border) / rounding) * rounding, np.ceil((lonlim[1] + border) / rounding) * rounding] - + # First download needed .hgt files. Quality is either 1, 3 or 30. If possible the files are downloaded. Otherwise # we fall back to lower quality. This is done using the elevation package tiles, q_tiles, tiles_30 = self._download_dem_files(latlim, lonlim, quality, data_folder, password=password, username=username) - + print(tiles, q_tiles, tiles_30) + #sys.exit() # Then create the final grid. This depends on the needed data type and possible resampling... if quality == 'SRTM1': pixel_degree = 3600 @@ -262,7 +263,7 @@ def _download_dem_files(self, latlim, lonlim, quality, data_folder, username='', else: print('You should specify a username and password to download SRTM data. ') return - + filelist = self._srtm_listing(data_folder, username=username, password=password) outfiles = [] q_files = [] @@ -436,7 +437,7 @@ def _kml_shp_2_bb(self, filename): latlim = [min(lat), max(lat)] lonlim = [min(lon), max(lon)] else: - print 'format not recognized! Pleas creat either a .kml or .shp file.' + print('format not recognized! 
Pleas creat either a .kml or .shp file.') return [] return latlim, lonlim @@ -456,18 +457,27 @@ def _add_egm96(self, dem_tiff, egm_tiff, data_folder): if not os.path.exists(egm_source_tiff): if not os.path.exists(filename): # Download egm96 file - command = 'wget http://earth-info.nga.mil/GandG/wgs84/gravitymod/egm96/binary/WW15MGH.DAC -O ' + filename + #command = 'wget http://earth-info.nga.mil/GandG/wgs84/gravitymod/egm96/binary/WW15MGH.DAC -O ' + filename + command = 'wget https://github.com/anurag-kulshrestha/geoinformatics/raw/master/WW15MGH.DAC -O ' + filename os.system(command) # Get georeference latlim = [-90.125, 90.125] - lonlim = [-0.125, 359.875] + #lonlim = [-0.125, 359.875] + lonlim = [-179.125, 180.875] dlat = 0.25 dlon = 0.25 egm_data = self._create_georeference(latlim, lonlim, dlat, dlon, 'float32', egm_source_tiff) # Load data - egm96 = np.fromfile(filename, dtype='>i2').reshape((721, 1440)).astype('float32') + shp = (721, 1440) + egm96 = np.fromfile(filename, dtype='>i2').reshape(shp).astype('float32') + #Patch to flip the array to Aus on the right and South America on the left + egm96_flipped = np.empty(egm96.shape) + + egm96_flipped[:, :shp[1]//2] = egm96[:, shp[1]//2 :] + egm96_flipped[:, shp[1]//2:] = egm96[:, :shp[1]//2 ] + egm96 = egm96_flipped # Save as geotiff egm_data.GetRasterBand(1).WriteArray(egm96 / 100) @@ -480,7 +490,8 @@ def _add_egm96(self, dem_tiff, egm_tiff, data_folder): gdal.ReprojectImage(egm_source, egm_data, None, None, gdalconst.GRA_Bilinear) egm_data = None - dem_new = dem_tiff + '.new' + dem_new = dem_tiff + '_new.tiff'# patch by anurag on 16-06-2021 + #dem_new = dem_tiff + '.new' # Finally open original dataset and subtract command = 'gdal_calc.py -A ' + dem_tiff + ' -B ' + egm_tiff + ' --outfile=' + dem_new + ' --calc="A+B"' @@ -504,7 +515,7 @@ def _output_doris_inputfiles(self, dem_tiff, out_file, var_file): output_txt = out_file + '.doris_inputfile' output_var = var_file - output_var = open(output_var, 'w') + 
output_var = open(output_var, 'wb') txtfile = open(output_txt, 'w') dem_var = dict() @@ -559,38 +570,43 @@ def _output_doris_inputfiles(self, dem_tiff, out_file, var_file): def _srtm_listing(self, data_folder, username, password): # This script makes a list of all the available 1,3 and 30 arc second datafiles. # This makes it easier to detect whether files do or don't exist. - + data_file = os.path.join(data_folder, 'filelist') if os.path.exists(data_file): dat = open(data_file, 'r') filelist = pickle.load(dat) dat.close() return filelist + #******************************************************* + #Patch by Anurag + server = "https://e4ftl01.cr.usgs.gov" - server = "http://e4ftl01.cr.usgs.gov" - - folders = 'SRTM/SRTMGL1.003/2000.02.11/', 'SRTM/SRTMGL3.003/2000.02.11/', 'SRTM/SRTMGL30.002/2000.02.11/' - keys = ['SRTM1', 'SRTM3', 'SRTM30'] + folders = 'MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_1.html', 'MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_2.html', 'MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_3.html', 'MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_4.html','MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_5.html' , 'MEASURES/SRTMGL1.003/2000.02.11/SRTMGL1_page_6.html', 'MEASURES/SRTMGL3.003/2000.02.11/', 'MEASURES/SRTMGL30.002/2000.02.11/' + keys = ['SRTM1','SRTM1','SRTM1','SRTM1','SRTM1','SRTM1', 'SRTM3', 'SRTM30'] + #******************************************************* filelist = dict() filelist['SRTM1'] = dict() filelist['SRTM3'] = dict() filelist['SRTM30'] = dict() - + for folder, key_value in zip(folders, keys): - - conn = requests.get(server + '/' + folder, auth=(username, password)) + print(server+'/'+folder) + conn = requests.get(server + '/' + folder)#, auth=(username, password)) + if conn.status_code == 200: - print "status200 received ok" + print("status200 received ok") else: - print "an error occurred during connection" + print("an error occurred during connection") data = conn.text parser = parseHTMLDirectoryListing() parser.feed(data) files 
= parser.getDirListing() - + if key_value == 'SRTM1' or key_value == 'SRTM3': - files = [f for f in files if f.endswith('hgt.zip')] + #files = [f for f in files if f.endswith('hgt.zip')] + files_com = [f for f in files if f.endswith('hgt.zip')] + files = [f.split('/')[-1] for f in files_com] north = [int(filename[1:3]) for filename in files] east = [int(filename[4:7]) for filename in files] for i in [i for i, filename in enumerate(files) if filename[0] == 'S']: @@ -606,12 +622,12 @@ def _srtm_listing(self, data_folder, username, password): for i in [i for i, filename in enumerate(files) if filename[0] == 'w']: east[i] = east[i] * -1 - for filename, n, e in zip(files, north, east): + for filename, n, e in zip(files_com, north, east): if not str(n) in filelist[key_value]: filelist[key_value][str(n)] = dict() - filelist[key_value][str(n)][str(e)] = server + '/' + folder + filename + filelist[key_value][str(n)][str(e)] = filename#server + '/' + folder + filename - file_list = open(os.path.join(data_folder, 'filelist'), 'w') + file_list = open(os.path.join(data_folder, 'filelist'), 'wb') pickle.dump(filelist, file_list) file_list.close() @@ -727,7 +743,7 @@ def handle_endtag(self, tag): def handle_data(self, data): if self.inTitle: self.title = data - print "title=%s" % data + print("title=%s" % data) if "Index of" in self.title: # print "it is an index!!!!" self.isDirListing = True diff --git a/prepare_stack/create_inputfiles.py b/prepare_stack/create_inputfiles.py index 5247141..c8ea78f 100644 --- a/prepare_stack/create_inputfiles.py +++ b/prepare_stack/create_inputfiles.py @@ -31,7 +31,7 @@ def __init__(self, dem_info ,settings_table, sensor): self.xml_data = settings.find('.' 
+ sensor) self.header_data = self.xml_data.find('.header_settings') - dem_info = open(dem_info, 'r') + dem_info = open(dem_info, 'rb') self.dem_var = pickle.load(dem_info) self.inputfilenames = ['coarsecorr', 'coarseorb', 'coherence', 'coherence_network', 'comprefdem', 'comprefpha', 'coregpm', diff --git a/prepare_stack/download_sentinel_data_orbits.py b/prepare_stack/download_sentinel_data_orbits.py index 4d18010..890ed29 100644 --- a/prepare_stack/download_sentinel_data_orbits.py +++ b/prepare_stack/download_sentinel_data_orbits.py @@ -2,7 +2,7 @@ # check for the files which are downloaded. import urllib -import urllib2 +import urllib2, urllib3 import ssl import re import os, sys @@ -57,7 +57,8 @@ def sentinel_available(start_day='', end_day='', sensor_mode='', product='', lev # Finally we do the query to get the search result. string = string[5:] + '&rows=1000' url = 'https://scihub.copernicus.eu/dhus/search?q=' + urllib.quote_plus(string) - print(url) + #url = 'https://scihub.copernicus.eu/gnss/search?q=' + urllib.quote_plus(string)# check for new orbit data downloading + #print(url) print('Requesting available products: ' + url) request = urllib2.Request(url) @@ -65,10 +66,11 @@ def sentinel_available(start_day='', end_day='', sensor_mode='', product='', lev request.add_header("Authorization", "Basic %s" % base64string) # connect to server. 
Hopefully this works at once + gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) try: - dat = urllib2.urlopen(request) + dat = urllib2.urlopen(request, context=gcontext) except: - print 'not possible to connect this time' + print('not possible to connect this time') return [], [], [] html_dat = '' @@ -88,6 +90,82 @@ def sentinel_available(start_day='', end_day='', sensor_mode='', product='', lev return products, links, dates +def sentinel_available_gnss(start_day='', end_day='', sensor_mode='', product='', level='', track='', polarisation='', orbit_direction='', ROI='', user='', password=''): + # All available sentinel 1 images are detected and printed on screen. + # Following variables can be used to make a selection. + # start_day > first day for downloads (default one month before now) [yyyymmdd] + # end_day > last day for downloads (default today) + # + + # string is the field we enter as url + string = '' + #add platform-name + string = string + ' AND ' + 'platformname:'+'Sentinel-1' + + if sensor_mode: + string = string + ' AND ' + 'sensoroperationalmode:' + sensor_mode + if product: + string = string + ' AND ' + 'producttype:' + product + if level: + string = string + ' AND ' + level + if orbit_direction: + string = string + ' AND ' + 'orbitdirection:' + orbit_direction + if track: + string = string + ' AND ' + 'relativeorbitnumber:' + track + if start_day: + start = datetime.datetime.strptime(start_day, '%Y-%m-%d').strftime('%Y-%m-%d') + else: + start = (datetime.datetime.now() - datetime.timedelta(days=350)).strftime('%Y-%m-%d') + if end_day: + end = datetime.datetime.strptime(end_day, '%Y-%m-%d').strftime('%Y-%m-%d') + else: + end = datetime.datetime.now().strftime('%Y-%m-%d') + if polarisation: + string = string + ' AND ' + 'polarisationmode:' + polarisation + if ROI: + shape_str = load_shape_info(ROI) + string = string + ' AND footprint:"Intersects(POLYGON(' + shape_str + '))"' + + date_string = 'beginPosition:[' + start + 'T00:00:00.000Z TO ' + end + 
'T23:59:59.999Z] AND endPosition:[' + start + 'T00:00:00.000Z TO ' + end + 'T23:59:59.999Z]' + string = string + ' AND ' + date_string + + # Finally we do the query to get the search result. + string = string[5:] #+ '&rows=100' + #url = 'https://scihub.copernicus.eu/dhus/search?q=' + urllib.quote_plus(string) + url = 'https://scihub.copernicus.eu/gnss/search?q=' + urllib.quote_plus(string)# check for new orbit data downloading + #url = 'https://scihub.copernicus.eu/gnss/search?q=' + string + #url = url+'&start=100&rows=100'# + #TODO: automate swwitching between pages + print(url) + + print('Requesting available products: ' + url) + request = urllib2.Request(url) + base64string = base64.b64encode('%s:%s' % (user, password)) + request.add_header("Authorization", "Basic %s" % base64string) + # connect to server. Hopefully this works at once + gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) + try: + dat = urllib2.urlopen(request, context=gcontext) + except: + print('not possible to connect this time') + return [], [], [] + + html_dat = '' + for line in dat: + #print(line) + html_dat = html_dat + line + + parser = etree.HTMLParser() + tree = etree.fromstring(html_dat, parser) + products = [data for data in tree.iter(tag='entry')] + links = [data.find('link').attrib for data in tree.iter(tag='entry')] + dates = [data.findall('date')[1].text for data in tree.iter(tag='entry')] + + print('Following products will be downloaded') + for link in links: + print(link) + + return products, links, dates def load_shape_info(shapefile): # This script converts .kml, .shp and .txt files to the right format. If multiple shapes are available the script @@ -115,7 +193,7 @@ def load_shape_info(shapefile): st = st + str(p[0]) + ' ' + str(p[1]) + ',' st = st[:-1] + ')' else: - print 'format not recognized! Pleas creat either a .kml or .shp file.' + print('format not recognized! 
Pleas creat either a .kml or .shp file.') return [] return st @@ -128,7 +206,7 @@ def sentinel_check_validity(products=[], destination_folder='', user='', passwor invalid_files = [] if not products: - print 'Nothing to check' + print('Nothing to check') return for product in products: @@ -183,7 +261,7 @@ def sentinel_download(products=[], xml_only=False, destination_folder='', proje # Download the files which are found by the sentinel_available script. if not products: - print 'No files to download' + print('No files to download') return wget_base = 'wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --continue --tries=20 --no-check-certificate --user=' + user + ' --password=' + password + ' ' @@ -285,6 +363,28 @@ def sentinel_download(products=[], xml_only=False, destination_folder='', proje os.system('rm ' + xml_dir) os.system('rm ' + xml_project) +def sentinel_orbit_gnss_download(products=[], xml_only=False, destination_folder='', project_folder='', user='', password=''): + # Download the files which are found by the sentinel_available script. + + if not products: + print('No files to download') + return + + wget_base = 'wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --continue --tries=20 --no-check-certificate --user=' + user + ' --password=' + password + ' ' + + for product in products: + date = str(product.findall('date')[1].text) + date = datetime.datetime.strptime(date[:19], '%Y-%m-%dT%H:%M:%S') + + url = str('"'+product.findall('link')[0].attrib['href'][:-6]+ urllib.quote_plus('$value') +'"') + name = str(product.find('title').text) + #print(url) + wget_data = wget_base + url + ' -O ' + destination_folder+'/'+name + print('download url is:' + wget_data) + os.system(wget_data) + #sys.exit() + + def sentinel_quality_check(filename, uuid, user, password): # Check whether the zip files can be unpacked or not. This is part of the download procedure. 
def download_orbits_1(start_date, end_date, pages=30, precise_folder='', restituted_folder =''):
    """Placeholder for a reworked orbit-download routine.

    Not implemented yet — the signature deliberately mirrors
    download_orbits() below so callers can be switched over later.
    The call site in __main__ is currently commented out.
    """
    pass
'http://aux.sentinel1.eo.esa.int/POEORB/'+orb[25:29]+'/'+orb[29:31]+'/'+orb[31:33]+'/'+orb+'.EOF' + #http://aux.sentinel1.eo.esa.int/POEORB/2019/02/26/S1B_OPER_AUX_POEORB_OPOD_20190226T110623_V20190205T225942_20190207T005942.EOF if not os.path.exists(filename): try: + #print(url, filename) + #sys.exit() urllib.urlretrieve(url, filename, context=gcontext) except TypeError: urllib.urlretrieve(url, filename) @@ -445,6 +554,7 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute level = 'L1' sensor_mode = 'IW' product = 'SLC' + orbit_precice_product = 'AUX_POEORB' # user settings xml_name = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -454,13 +564,12 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute settings = tree.getroot() user = settings.find('.scihub_username').text password = settings.find('.scihub_password').text - - products, links, dates = sentinel_available(start_day=start_date, end_day=end_date, ROI=ROI, - polarisation=polarisation, sensor_mode=sensor_mode, track=track, - orbit_direction='', level=level, product=product,user=user, - password=password) - - sentinel_download(products, destination_folder=archive_folder, user=user, password=password) + #products, links, dates = sentinel_available(start_day=start_date, end_day=end_date, ROI=ROI, + #polarisation=polarisation, sensor_mode=sensor_mode, track=track, + #orbit_direction='', level=level, product=product,user=user, + #password=password) + + #sentinel_download(products, destination_folder=archive_folder, user=user, password=password) precise_folder = os.path.join(orbit_folder, 'precise') if not os.path.exists(precise_folder): @@ -468,5 +577,10 @@ def download_orbits(start_date, end_date, pages=30, precise_folder='', restitute restituted_folder = os.path.join(orbit_folder, 'restituted') if not os.path.exists(restituted_folder): os.makedirs(restituted_folder) - - download_orbits(start_date, end_date, pages=100, 
precise_folder=precise_folder, restituted_folder=restituted_folder) + + products_orbits, links_orbits, dates_orbits = sentinel_available_gnss(start_day=start_date, end_day=end_date,product=orbit_precice_product, user = 'gnssguest', password = 'gnssguest') + + sentinel_orbit_gnss_download(products_orbits, destination_folder=precise_folder, user = 'gnssguest', password = 'gnssguest') + #download_orbits(start_date, end_date, pages=300, precise_folder=precise_folder, restituted_folder=restituted_folder) + + #download_orbits_1(start_date, end_date, pages=300, precise_folder=precise_folder, restituted_folder=restituted_folder) diff --git a/prepare_stack/inputfile_template.xml b/prepare_stack/inputfile_template.xml index 5866a5f..a779ae3 100644 --- a/prepare_stack/inputfile_template.xml +++ b/prepare_stack/inputfile_template.xml @@ -1,5 +1,7 @@ - + sentinel-1 Settings for the headers @@ -49,7 +51,7 @@ OFF refdem.raw dem_radar.raw - h2ph_srd.raw + h2ph_srd.raw master_slave.crd diff --git a/prepare_stack/prepare_datastack.py b/prepare_stack/prepare_datastack.py index e414b80..bb2088c 100644 --- a/prepare_stack/prepare_datastack.py +++ b/prepare_stack/prepare_datastack.py @@ -42,12 +42,13 @@ def prepare(self, inputfile): srtm_password = settings_doris.find('.usgs_password').text dem_out = os.path.join(doris_input_xml.get_value('dem_folder'), 'dem.raw') dem_var = dem_out + '.var' - + #print('1') dem = CreateDem() + if doris_input_xml.get_value('generate_dem'.lower())=='yes': dem.create(doris_input_xml.get_value('shape_file_path'), dem_out, dem_var, resample=None, doris_input=True, rounding=0.1, border=1.5, - data_folder=doris_input_xml.get_value('dem_processing_folder'), quality='SRTM3', + data_folder=doris_input_xml.get_value('dem_processing_folder'), quality='SRTM1', password=srtm_password, username=srtm_username) ## Then create the inputfiles diff --git a/sar_tools/Makefile b/sar_tools/Makefile index fb3e1de..891ec63 100644 --- a/sar_tools/Makefile +++ b/sar_tools/Makefile 
@@ -22,8 +22,8 @@ SHELL = /bin/sh ### Installdirdef should exist! -#INSTALL_DIR = /usr/local/bin -INSTALL_DIR = /home/dlevelt/src/Doris_s1_git/bin +INSTALL_DIR = /usr/local/bin +#INSTALL_DIR = /home/dlevelt/src/Doris_s1_git/bin ### GCC compiler CC = g++