#!/usr/bin/env python
# __BEGIN_LICENSE__
#  Copyright (c) 2009-2013, United States Government as represented by the
#  Administrator of the National Aeronautics and Space Administration. All
#  rights reserved.
#
#  The NGT platform is licensed under the Apache License, Version 2.0 (the
#  "License"); you may not use this file except in compliance with the
#  License. You may obtain a copy of the License at
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
# __END_LICENSE__

# NOTE: 'math' was previously imported twice on this line.
import sys, argparse, subprocess, re, os, math, time, glob, shutil
import os.path as P

# Set up the path to Python modules about to load
basepath    = os.path.abspath(sys.path[0])
pythonpath  = os.path.abspath(basepath + '/../Python')  # for dev ASP
libexecpath = os.path.abspath(basepath + '/../libexec') # for packaged ASP
sys.path.insert(0, basepath) # prepend to Python path
sys.path.insert(0, pythonpath)
sys.path.insert(0, libexecpath)

import asp_system_utils, asp_string_utils, asp_cmd_utils
asp_system_utils.verify_python_version_is_supported()
from asp_stereo_utils import * # must be after the path is altered above

# Prepend to system PATH
os.environ["PATH"] = libexecpath + os.pathsep + os.environ["PATH"]

# Restore the library path stashed by spawn_to_nodes() before it invoked
# GNU Parallel (a workaround for 'parallel' failing to start with ASP's
# bundled libraries on some systems, e.g. RHEL 8). Child processes land here.
if 'ASP_LIBRARY_PATH' in os.environ:
    os.environ['LD_LIBRARY_PATH'] = os.environ['ASP_LIBRARY_PATH']

# We will not symlink PC.tif and RD.tif which will be vrts,
# and neither the log files
skip_symlink_expr = r'^.*?-(PC\.tif|RD\.tif|log.*?\.txt)$'

def check_system_memory(opt, args, settings):
    '''Issue a warning when doing correlation if our selected options
    are estimated to exceed available RAM. Currently only the ASP SGM
    and MGM have large memory requirements.'''

    if asp_system_utils.run_with_return_code(['which', 'free']) != 0:
        # No 'free' exists. This is the case on OSX.
        return

    try:
        alg = stereo_alg_to_num(settings['stereo_algorithm'][0])
        if alg == VW_CORRELATION_BM or alg >= VW_CORRELATION_OTHER:
            return # only the ASP SGM/MGM algorithms need this check

        # Use a command line call to estimate the amount of free memory (in MB)
        freemem_mb = list(map(int, os.popen('free -m').readlines()[-2].split()[1:]))[2]

        # This is the processor count code, won't work if other
        #  machines have a different processor count.
        num_procs = opt.processes
        if opt.processes is None:
            num_procs = get_num_cpus()

        # Memory usage calculations
        bytes_per_mb        = 1024*1024
        est_bytes_per_pixel = 8 # This is a very rough estimate!

        num_tile_pixels = pow(int(settings['corr_tile_size'][0]), 2)
        baseline_mem    = (num_tile_pixels*est_bytes_per_pixel) / bytes_per_mb
        sgm_ram_limit   = int(settings['corr_memory_limit_mb'][0])
        ram_per_process = baseline_mem + sgm_ram_limit
        est_ram_usage   = num_procs * ram_per_process

        if est_ram_usage > freemem_mb:
            print('Warning: Estimated maximum memory consumption is '
                  + str(est_ram_usage) + 'mb but only ' + str(freemem_mb)
                  + 'mb of free memory is detected.  To lower memory use, '
                  + 'consider reducing the number of processes or lowering '
                  + '--corr-memory-limit-mb.')

    # Catch Exception, not a bare 'except:', so KeyboardInterrupt and
    # SystemExit still propagate; this check must never abort the run.
    except Exception:
        # Don't let an error here prevent the tool from running.
        print('Warning: Error checking system memory, skipping the memory test!')
        return

def tile_dir(prefix, tile):
    """Return the working directory for one tile: <prefix>-<tile name>."""
    return '-'.join([prefix, tile.name_str()])

def produce_tiles(settings, tile_w, tile_h):
    '''Generate a list of bounding boxes for each output tile, in
    row-major order, covering the transformed left image.'''
    image_size = settings["trans_left_image_size"]
    num_cols   = int(math.ceil(float(image_size[0]) / tile_w))
    num_rows   = int(math.ceil(float(image_size[1]) / tile_h))

    tiles = []
    for row in range(num_rows):
        y = row * tile_h
        # Clamp the last row of tiles to the image bottom edge
        height = tile_h if row + 1 < num_rows else int(image_size[1]) - y
        for col in range(num_cols):
            x = col * tile_w
            # Clamp the last column of tiles to the image right edge
            width = tile_w if col + 1 < num_cols else int(image_size[0]) - x
            tiles.append(BBox(x, y, width, height))

    return tiles

def sym_link_prev_run(prev_run_prefix, out_prefix):
    '''Sym link files from a previous run up to triangulation to the
    output directory of this run. We must not symlink directories from
    the previous run as then the new results will be written there.
    Do not sym link the previous point cloud, DEM, etc., also not
    VRT files as those will not be valid.
    '''

    # Ensure we do not wipe the run we want to reuse
    if prev_run_prefix == out_prefix:
        raise Exception('The previous run prefix is the same as the current run prefix.')

    curr_dir = os.path.dirname(out_prefix)
    mkdir_p(curr_dir)

    for ext in ['L.tif', 'R.tif', 'L-cropped.tif', 'R-cropped.tif',
                'L_sub.tif', 'R_sub.tif', 'Mask_sub.tif',
                '.vwip', '.exr', '.match', 'GoodPixelMap.tif',
                'F.tif', 'stats.tif', 'Mask.tif', 'bathy_mask.tif']:
        files = glob.glob(prev_run_prefix + '*' + ext)
        for f in files:
            if os.path.isdir(f): continue # Skip folders
            dst_f = f[len(prev_run_prefix):] # strip the prefix
            dst_f = out_prefix + dst_f
            rel_src = os.path.relpath(f, curr_dir)
            # Use lexists(), not exists(): exists() returns False for a
            # dangling symlink, which would then be left in place and make
            # the os.symlink() call below fail with FileExistsError.
            if os.path.lexists(dst_f):
                os.remove(dst_f) # this could be some dangling thing
            os.symlink(rel_src, dst_f)

def create_subproject_dirs(settings, **kw):
    """
    Create a subdirectory for each process we start. Pretend previous
    steps of stereo already ran in that directory by creating symbolic
    links to actual files in the parent run directory. Don't symlink to
    RD.tif or PC.tif as those will be files which actually need to be
    created in each subdirectory. Return the created subdirectories.
    """

    dirs = []
    out_prefix = settings['out_prefix'][0]

    # Save the list of subdirectories to disk. This is used in stereo_blend.
    dirList = out_prefix + '-dirList.txt'
    try:
        parentDir = os.path.dirname(dirList)
        mkdir_p(parentDir)
    except Exception:
        # Best effort; opening dirList below will fail loudly if needed.
        pass

    print("Writing: " + dirList)
    # Context manager ensures the file is closed even if symlinking fails
    with open(dirList, 'w') as fout:
        for tile in produce_tiles(settings, opt.job_size_w, opt.job_size_h):
            subproject_dir = tile_dir(out_prefix, tile)
            dirs.append(subproject_dir)
            tile_prefix = subproject_dir + "/" + tile.name_str()
            if opt.dryrun:
                print("mkdir -p %s" % subproject_dir)
                print("Soft linking via %s %s" % (tile_prefix, out_prefix))
            else:
                mkdir_p(subproject_dir)

                fout.write(subproject_dir + "\n")

                # Get list of files in the output (not tile) directory
                files = glob.glob(out_prefix + '*')
                for f in files:
                    if os.path.isdir(f): continue # Skip folders
                    rel_src = os.path.relpath(f, subproject_dir)
                    m = re.match(skip_symlink_expr, rel_src)
                    if m: continue # won't sym link certain patterns
                    # Make a symlink from main folder to the tile folder
                    dst_f = f.replace(out_prefix, tile_prefix)
                    if os.path.lexists(dst_f): continue
                    os.symlink(rel_src, dst_f)

    return dirs

def rename_files(settings, postfix_in, postfix_out, **kw):
    """
    Rename tile_dir/file_in.tif to tile_dir/file_out.tif.
    Skip symlinks, such as when this directory was processed before.
    """

    out_prefix = settings['out_prefix'][0]
    for tile in produce_tiles(settings, opt.job_size_w, opt.job_size_h):
        base = tile_dir(out_prefix, tile) + "/" + tile.name_str()
        src  = base + postfix_in
        dst  = base + postfix_out
        # Only rename regular files; symlinks indicate a reused prior run
        if (not os.path.isfile(src)) or os.path.islink(src):
            continue
        if os.path.exists(dst):
            os.remove(dst)
        os.rename(src, dst)

def create_symlinks_for_multiview(settings, opt):
    '''Arrange each pair's per-tile outputs into the layout that the
    triangulation step expects, via symlinks.'''

    # Running parallel_stereo for each pair in a mutiview run
    # creates files like:
    # out-prefix-pair2/2-4096_4096_1629_1629/4096_4096_1629_1629-F.tif
    # To run parallel_stereo for tri, we expect this at
    # out-prefix-4096_4096_1629_1629/4096_4096_1629_1629-pair2/2-F.tif
    # Create the latter as a sym link.

    create_subproject_dirs(settings) # symlink L.tif, etc

    tiles = produce_tiles(settings, opt.job_size_w, opt.job_size_h)
    for s in sorted(settings.keys()):
        # Only the 'multiview_command...' entries of settings are relevant
        m = re.match(r'multiview_command', s)
        if not m:
            continue
        # Parse the stored per-pair command to recover that pair's prefix.
        # NOTE(review): 'sep' is not defined in this function; presumably it
        # is a module global set in __main__ before this is called -- verify.
        local_settings=run_and_parse_output("stereo_parse", settings[s][1:],
                                             sep, opt.verbose)
        local_prefix = local_settings['out_prefix'][0]
        # Split '<base>-pair<index>/...' into base prefix and pair index
        m = re.match(r'^(.*?)-pair(\d+)\/', local_prefix)
        if not m:
            continue
        base_prefix = m.group(1)
        index       = str(m.group(2))
        for tile in tiles:
            tile_str = tile.name_str()
            # Source: <base>-pair<i>/<i>-<tile>/<tile>*; dest: <base>-<tile>/<tile>-pair<i>/<i>*
            src_pref = tile_dir(base_prefix + '-pair' + index + '/' + index, tile) + '/' + tile_str
            dst_dir  = tile_dir(base_prefix, tile) + '/' + tile_str + '-pair' + index
            mkdir_p(dst_dir)

            dst_pref = dst_dir + '/' + index
            files = glob.glob(src_pref + '*')
            for f in files:
                m = re.match(skip_symlink_expr, f)
                if m:
                    continue # won't sym link certain patterns
                # Keep only the suffix following the source prefix
                m = re.match(r'^' + src_pref + '(.*?)$', f)
                if not m:
                    continue
                suff    = m.group(1)
                src_f   = src_pref + suff
                dst_f   = dst_pref + suff
                rel_src = os.path.relpath(src_f, dst_dir)
                if os.path.lexists(dst_f):
                    continue # link already in place from a prior run
                os.symlink(rel_src, dst_f)

def use_padded_tiles(settings):
    '''Return True when tiles must be grown with a collar: any algorithm
    beyond plain block matching, or local-epipolar alignment.'''
    if stereo_alg_to_num(settings['stereo_algorithm'][0]) > VW_CORRELATION_BM:
        return True
    return settings['alignment_method'][0] == 'local_epipolar'

def grow_crop_tile_maybe(settings, prog, tile):
    '''
    The tile, perhaps with padding added, and perhaps cropped to
    desired region, on which to run one of the stereo tools.
    Returns a BBox; when the tile does not intersect the user's crop
    window, the returned box has non-positive width or height, which
    callers test for.
    '''

    # Make a local copy of the tile, as we may grow it
    local_tile = BBox(tile.x, tile.y, tile.width, tile.height)
    # Only the correlation step runs on collared (padded) tiles
    if use_padded_tiles(settings) and prog == 'stereo_corr':
        collar_size = int(settings['collar_size'][0])
        local_tile.add_collar(collar_size)

    # Will do only the tiles intersecting user's crop window.
    w = settings['transformed_window']
    user_crop_win = BBox(int(w[0]), int(w[1]), int(w[2]), int(w[3]))

    # Return the (possibly empty) intersection. Previously an empty Python
    # list was returned for an empty intersection, but callers access
    # .width/.height on the result, which would raise AttributeError on a
    # list; an empty BBox satisfies their width/height <= 0 checks instead.
    return intersect_boxes(user_crop_win, local_tile)

def build_vrt(prog, settings, georef, postfix, tile_postfix, contract_tiles=False):
    '''Generate a VRT file to treat the separate image tiles as one large image.

    prog           - stereo tool whose tiles are being mosaicked.
    settings       - parsed stereo settings (values are lists of strings).
    georef         - dict providing the "WKT" and "GeoTransform" strings.
    postfix        - suffix appended to the output prefix to name the VRT.
    tile_postfix   - suffix of the per-tile image files to mosaic.
    contract_tiles - when True, offset the source window to skip the collar
                     padding on tiles that do not start at the crop origin.
    '''

    image_size = settings["trans_left_image_size"]

    vrt_file = settings['out_prefix'][0]+postfix
    print("Writing: " + vrt_file)
    f = open(vrt_file,'w')
    f.write("<VRTDataset rasterXSize=\"%i\" rasterYSize=\"%i\">\n" %
            (int(image_size[0]),int(image_size[1])))

    # Write the datum, projection, and georeference transform in XML format
    f.write("  <SRS>" + georef["WKT"] + "</SRS>\n")
    f.write("  <GeoTransform>" + georef["GeoTransform"] + "</GeoTransform>\n")

    # Locate a known good tile (some tiles may be missing, e.g. empty ones)
    tiles = produce_tiles(settings, opt.job_size_w, opt.job_size_h)
    goodFilename = ""
    for tile in tiles:
        directory = tile_dir(settings['out_prefix'][0], tile)
        filename  = directory + "/" + tile.name_str() + tile_postfix
        if os.path.isfile(filename):
            goodFilename = filename
            break
    if goodFilename == "":
        raise Exception('No tiles were generated.')

    # Do gdalinfo on the good tile to get metadata
    args = [goodFilename]
    sep = "="
    gdal_settings = run_and_parse_output("gdalinfo", args, sep, opt.verbose)
    
    # Extract the data type (e.g., Float32 or Float64); default to Float32
    data_type = "Float32"
    for s in gdal_settings:
        val = " ".join(gdal_settings[s])
        m = re.match(r'^.*? Type (\w+)', val)
        if m:
            data_type = m.group(1)
            break

    # Extract the block size, e.g., 256x256, from 'Band 2 Block=256x256'
    BlockXSize = 0
    BlockYSize = 0
    for s in gdal_settings:
        val = " ".join(gdal_settings[s])
        if re.match(r"^\s*Band\s+\d", s):
            m = re.match(r'^\s*(\d+)x(\d+)', val)
            if m:
                BlockXSize = m.group(1)
                BlockYSize = m.group(2)
                break

    # Extract no-data value, if present
    has_nodata = False
    key = 'NoData Value'
    nodata_value = 0
    if key in gdal_settings:
        has_nodata = True
        nodata_value = gdal_settings[key][0]

    # Find how many bands are in the file (the largest band index seen)
    num_bands = 0
    for s in gdal_settings:
        m = re.match(r'^.*?Band\s+(\d+)', s)
        if m:
            b = int(m.group(1))
            if num_bands < b:
                num_bands = b

    # Copy some keys over to the vrt
    keys = ["POINT_OFFSET", "AREA_OR_POINT", "BAND1", "BAND2", "BAND3", "BAND4", "BAND5", "BAND6"]
    for key in keys:
        if key in gdal_settings:
            f.write("  <Metadata>\n    <MDI key=\"" + key + "\">" +
                    gdal_settings[key][0] + "</MDI>\n  </Metadata>\n")

    # Write each band
    for b in range(1, num_bands + 1):
        f.write("  <VRTRasterBand dataType=\"%s\" band=\"%i\">\n" % (data_type,b))

        for tile in tiles:
            directory = tile_dir(settings['out_prefix'][0], tile)
            filename  = directory + "/" + tile.name_str() + tile_postfix

            # Handle non-existent tiles
            if not os.path.isfile(filename):
                continue # Leave empty

            relative  = os.path.relpath(filename, os.path.dirname(settings['out_prefix'][0]))
            f.write("    <SimpleSource>\n")
            f.write("       <SourceFilename relativeToVRT=\"1\">%s</SourceFilename>\n" % relative)
            f.write("       <SourceBand>%i</SourceBand>\n" % b)

            # Bugfix for the "too many open files" problem.
            # Per https://lists.osgeo.org/pipermail/gdal-dev/2017-October/047370.html
            adjusted_tile = grow_crop_tile_maybe(settings, prog, tile)
            f.write(('       <SourceProperties RasterXSize="%i" RasterYSize="%i" ' + \
                     'DataType="%s" BlockXSize="%s" BlockYSize="%s"/>\n') \
                    % (adjusted_tile.width, adjusted_tile.height, data_type, BlockXSize, BlockYSize))

            min_x  = 0
            min_y  = 0
            if (contract_tiles):
                # Need to account for the padding
                pad_amount = int(settings['collar_size'][0])
                user_min_x = int(settings['transformed_window'][0])
                user_min_y = int(settings['transformed_window'][1])
                # For the tiles not starting at zero, account for the fact that they
                #  have padding at the top and/or right of the images.
                if (tile.x > user_min_x):
                    min_x  += pad_amount
                if (tile.y > user_min_y):
                    min_y  += pad_amount

            # SrcRect selects within the tile file; DstRect places it in the mosaic
            f.write('       <SrcRect xOff="%i" yOff="%i" xSize="%i" ySize="%i"/>\n' %
                    (min_x, min_y, tile.width, tile.height))

            f.write('       <DstRect xOff="%i" yOff="%i" xSize="%i" ySize="%i"/>\n' %
                    (tile.x, tile.y, tile.width, tile.height))
            f.write("    </SimpleSource>\n")
        # End tile loop

        if has_nodata:
            f.write("  <NoDataValue>" + nodata_value + "</NoDataValue>\n")
        
        f.write("  </VRTRasterBand>\n")
    # End band loop
    f.write("</VRTDataset>\n")
    f.close()

def get_num_nodes(nodes_list):
    """
    Return the number of distinct computing nodes in the given node-list
    file (one node name per line; repetitions, as used on Pleiades, and
    blank lines are ignored). A nodes_list of None means the local machine.
    Raises an exception if the list turns out to be empty.
    """

    if nodes_list is None:
        return 1 # local machine

    # Count the number of nodes without repetition (need this for
    # Pleiades).
    nodes = {}
    num_nodes = 0
    try:
        # Use a context manager so the file is always closed (the original
        # code leaked the file handle).
        with open(nodes_list, "r") as fh:
            for line in fh:
                if re.match(r'^\s*$', line): continue # skip empty lines
                # The node name is the first whitespace-delimited token
                matches = re.match(r'^\s*([^\s]*)', line)
                if matches:
                    nodes[matches.group(1)] = 1
        num_nodes = len(nodes)
    except Exception as e:
        die(e)
    if num_nodes == 0:
        raise Exception('The list of computing nodes is empty')

    return num_nodes

def get_best_procs_threads(step, settings):
    """
    Decide the best number of processes to use on a node, and how many
    threads to use for each process. There used to be some fancy logic
    based on tile sizes and the sensor type, but it did not work well,
    and the dead 'if 0:' block holding it has been removed. For now, use
    many processes and one thread per process, unless the user overrides
    either value.
    Returns the tuple (num_procs, num_threads). The 'settings' argument
    is kept for interface compatibility with callers.
    """

    # We assume all machines have the same number of CPUs (cores)
    num_cpus = get_num_cpus()

    # Respect user's choice for the number of processes
    num_procs = num_cpus
    if opt.processes is not None:
        num_procs = opt.processes

    # Same for the number of threads.
    num_threads = 1
    if opt.threads_multi is not None:
        num_threads = opt.threads_multi

    if opt.verbose:
        print("For stage %d, using %d threads and %d processes." %
              (step, num_threads, num_procs))

    return (num_procs, num_threads)

# Launch GNU Parallel for all tiles, it will take care of distributing
# the jobs across the nodes and load balancing. The way we accomplish
# this is by calling this same script but with --tile-id <num>.
def spawn_to_nodes(step, settings, args):

    if opt.processes is None or opt.threads_multi is None:
        # The user did not specify these. We will find the best
        # for their system.
        (procs, threads) = get_best_procs_threads(step, settings)
    else:
        procs = opt.processes
        threads = opt.threads_multi

    # Replace any stale values of these options with the chosen ones
    asp_cmd_utils.wipe_option(args, '--processes', 1)
    asp_cmd_utils.wipe_option(args, '--threads-multiprocess', 1)
    args.extend(['--processes', str(procs)])
    args.extend(['--threads-multiprocess', str(threads)])

    tiles = produce_tiles(settings, opt.job_size_w, opt.job_size_h)

    # Each tile has an index in the list of tiles. There can be a huge amount of
    # tiles, and for that reason we store their indices in a file, rather than
    # putting them on the command line. Keep this file in the run directory.
    out_prefix = settings['out_prefix'][0]
    tiles_index = out_prefix + "-tiles-index.txt"
    mkdir_p(os.path.dirname(tiles_index))
    f = open(tiles_index, 'w')
    for i in range(len(tiles)):
        f.write("%d\n" % i)
    f.close()

    # Use GNU parallel with given number of processes.
    # TODO(oalexan1): Run 'parallel' using the runInGnuParallel() function call,
    # when the ASP_LIBRARY_PATH trick can be fully encapsulated in the
    # asp_system_utils.py code rather than being needed for each tool.
    cmd = ['parallel', '--will-cite', '--env', 'ASP_DEPS_DIR', '--env', 'PATH', '--env', 'LD_LIBRARY_PATH', '--env', 'ASP_LIBRARY_PATH', '--env', 'PYTHONHOME', '-u', '-P', str(procs), '-a', tiles_index]
    if which(cmd[0]) is None:
        raise Exception('Need GNU Parallel to distribute the jobs.')

    if opt.nodes_list is not None:
        cmd += ['--sshloginfile', opt.nodes_list]
    if opt.ssh is not None:
        cmd += ['--ssh', opt.ssh]
    if opt.parallel_options is not None:
        cmd += opt.parallel_options.split(' ')

    # Assemble the options which we want GNU parallel to not mess up
    # with. 
    args_copy = args[:] # deep copy
    args_copy += ["--work-dir", opt.work_dir]
    if opt.isisroot  is not None: 
        args_copy += ["--isisroot",  opt.isisroot]
    if opt.isisdata is not None: 
        args_copy += ["--isisdata", opt.isisdata]

    # Quote entities that have spaces in them. Don't quote quantities
    # already quoted.
    for index, arg in enumerate(args_copy):
        if re.search(r"[ \t]", arg) and arg[0] != '\'':
            args_copy[index] = '\'' + arg + '\''

    python_path = sys.executable # children must use same Python as parent
    # Each spawned child runs only this one pipeline step
    start = step; stop = start + 1

    # Put options in a single string.
    args_str = python_path + " "              + \
               " ".join(args_copy)            + \
               " --entry-point " + str(start) + \
               " --stop-point " + str(stop) 
    # GNU Parallel substitutes each index from the tiles file for '{}'
    args_str += " --tile-id {}"
    cmd += [args_str]

    # This is a bugfix for RHEL 8. The 'parallel' program fails to start with ASP's
    # libs, so temporarily hide them.
    if 'LD_LIBRARY_PATH' in os.environ:
        os.environ['ASP_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
        os.environ['LD_LIBRARY_PATH'] = ''

    # Run 'parallel'
    asp_system_utils.generic_run(cmd, opt.verbose)

    # Undo the above
    if 'ASP_LIBRARY_PATH' in os.environ:
        os.environ['LD_LIBRARY_PATH'] = os.environ['ASP_LIBRARY_PATH']

def tile_run(prog, args, settings, tile, **kw):
    '''Job launch wrapper for a single tile'''

    if prog != 'stereo_blend':  # Set collar_size argument to zero in almost all cases.
        set_option(args, '--sgm-collar-size', [0])

    # Get tool path
    binpath = bin_path(prog)

    # Measure the memory usage on Linux and elapsed time
    timeCmd = []
    if 'linux' in sys.platform and os.path.exists('/usr/bin/time'):
        timeCmd = ['/usr/bin/time', '-f', prog + \
                   ': elapsed=%E ([hours:]minutes:seconds), memory=%M (kb)']

    try:
        # Get tile folder
        tile_dir_string = tile_dir(settings['out_prefix'][0], tile) + "/" + tile.name_str()

        # When using SGM correlation, increase the output tile size.
        # - The output image will contain more populated pixels but 
        #   there will be no other change.
        adjusted_tile = grow_crop_tile_maybe(settings, prog, tile)
        if adjusted_tile.width <= 0 or adjusted_tile.height <= 0:
            return # the produced tile is empty

        # Increase the processing block size for the tile so we process
        # the entire tile in one go.
        if use_padded_tiles(settings) and prog == 'stereo_corr':
            curr_tile_size = max(adjusted_tile.width, adjusted_tile.height)
            set_option(args, '--corr-tile-size', [curr_tile_size])

        # Set up the call string
        call = [binpath]
        call.extend(args)

        # The per-process thread count overrides any --threads in args
        if opt.threads_multi is not None:
            asp_cmd_utils.wipe_option(call, '--threads', 1)
            call.extend(['--threads', str(opt.threads_multi)])

        cmd = call + ['--trans-crop-win'] + adjusted_tile.as_array() # append the region to process
        cmd[cmd.index(settings['out_prefix'][0])] = tile_dir_string # out prefix for this tile

        if opt.dryrun:
            print(" ".join(cmd))
            return
        if opt.verbose:
            print(" ".join(cmd))

        # See if perhaps we can skip correlation
        if prog == 'stereo_corr' and opt.resume_at_corr:

            D = tile_dir_string + '-D.tif'
            if (not os.path.islink(D)) and asp_system_utils.is_valid_image(D):
                # The disparity D.tif is valid and not a symlink. No need
                # to recreate it.
                return

            Dnosym = tile_dir_string + '-Dnosym.tif'
            if (not os.path.islink(Dnosym)) and asp_system_utils.is_valid_image(Dnosym):
                # In a previous run D.tif was renamed to Dnosym.tif
                # and D.tif was made into a symlink. Still good.
                # Just undo the rename.
                if os.path.exists(D):
                    os.remove(D)
                os.rename(Dnosym, D)
                return

            # We are left with the situation that there is no image which is both
            # valid and not a symlink. Perhaps D does not exist or is corrupted.
            # Then wipe D and Dnosym, if present, and redo the correlation.
            print("Will run correlation to create a valid image for " + D)
            if os.path.exists(D):
                os.remove(D)
            if os.path.exists(Dnosym):
                os.remove(Dnosym)

        # Prepend the /usr/bin/time wrapper (if available) and run the tool
        cmd = timeCmd + cmd

        (out, err, status) = asp_system_utils.executeCommand(cmd, realTimeOutput = True)

        if len(timeCmd) > 0:
            # /usr/bin/time reports its stats on stderr; echo and save them
            print(err)
            usage_file = tile_dir_string + "-" + prog + "-resource-usage.txt"  
            with open(usage_file, 'w') as f:
                f.write(err)

        if status != 0:
            raise Exception('Stereo step ' + kw['msg'] + ' failed')

    except OSError as e:
        raise Exception('%s: %s' % (binpath, e))

def normal_run(prog, args, **kw):
    '''Job launch wrapper for a non-tile stereo call.'''

    binpath = bin_path(prog)
    call = [binpath] + list(args)

    # Honor the user-specified thread count for single-process stages
    if opt.threads_single is not None:
        asp_cmd_utils.wipe_option(call, '--threads', 1)
        call += ['--threads', str(opt.threads_single)]

    call = reduce_num_threads_in_pprc(call)

    if opt.dryrun:
        print('%s' % ' '.join(call))
        return
    if opt.verbose:
        print('%s' % ' '.join(call))

    try:
        code = subprocess.call(call)
    except OSError as e:
        raise Exception('%s: %s' % (binpath, e))
    if code != 0:
        raise Exception('Stereo step ' + kw['msg'] + ' failed')

def isVrt(filename):
    '''
    Peek at the first line of the given file and return True if it looks
    like a GDAL VRT dataset. Missing files and directories return False.
    '''

    if (not os.path.exists(filename)) or os.path.isdir(filename):
        return False

    try:
        with open(filename, 'r', encoding='utf-8') as f:
            # A VRT is an XML file whose root element is <VRTDataset ...>
            return f.readline().lower().startswith('<vrtdataset')
    # Narrow the former bare 'except:' so unrelated bugs are not hidden.
    # Binary files fail UTF-8 decoding and land here.
    except (OSError, UnicodeError):
        return False

def keepOnlySpecified(keep_only, out_prefix, subdirs):
    """
    Merge all VRTs and wipe any files the user does not want to keep.

    keep_only  - "unchanged" (do nothing), "all_combined" (keep all known
                 outputs), "essential" (a preset list of suffixes), or a
                 space-separated list of file suffixes to keep.
    out_prefix - the run's output prefix.
    subdirs    - per-tile subdirectories, deleted unless kept.
    """

    if keep_only == "unchanged":
        return # keep everything unchanged
    elif keep_only == "essential":
        keep_only = ".exr -L.tif -F.tif disp-diff.tif -PC.tif"

    # List all the files stereo may create. We will not attempt to delete
    # any files except these (and subdirs)
    all_files = set()
    for ext in ".vwip .match cropped.tif -L.tif -R.tif ask.tif .exr sub.tif -D.tif -RD.tif -B.tif disp-diff.tif -F.tif -PC.tif".split():
        for f in glob.glob(out_prefix + "*" + ext):
            all_files.add(f)

    # List all the files with the suffixes the user wants to keep
    keep_files = set()
    if keep_only == "all_combined":
        keep_files = all_files.copy()
    else:
        for keep in keep_only.split():
            for f in glob.glob(out_prefix + "*" + keep):
                keep_files.add(f)

    if len(keep_files) == 0:
        # Sometimes the user can make a mistake here. If not sure, don't do anything.
        print("No files to keep. This is likely a user error. Not deleting any files.")
        return

    for f in keep_files:
        print("Keeping: " + f)
        if isVrt(f):
            # Merge the VRT into a self-contained compressed TIF, then
            # replace the VRT with it under the same name, since the
            # per-tile files it references are about to be deleted.
            f_out = os.path.splitext(f)[0] + "_merged.tif"
            print("Convert " + f + " from VRT to TIF format.")
            cmd = ['gdal_translate', '-co', 'compress=lzw', '-co', 'TILED=yes', '-co',
                   'INTERLEAVE=BAND', '-co', 'BLOCKXSIZE=256', '-co', 'BLOCKYSIZE=256',
                   '-co', 'BIGTIFF=IF_SAFER', f, f_out]
            (out, err, status) = asp_system_utils.executeCommand(cmd, realTimeOutput = True)
            if status != 0:
                # Typo fix: was 'Qutting.'
                raise Exception('Failed converting VRT to TIF. Quitting. ' + \
                                'Likely this run is partially wiped now.')
            print("Renaming: " + f_out + " to " + f)
            shutil.move(f_out, f)

    # Delete the other files and directories
    for f in list(all_files) + subdirs:
        if f not in keep_files:
            print("Deleting: " + f)
            if os.path.isdir(f):
                shutil.rmtree(f)
            else:
                os.remove(f)

    # It looks useful to print the final cloud, as the messages above
    # can be quite verbose.
    pc_file = glob.glob(out_prefix + "-PC.tif")
    if len(pc_file) > 0:
        print("Output point cloud: " + pc_file[0])

def set_collar_size(new_collar_size, args, settings):
    """Record a new collar (tile padding) size in both the parsed settings
    dictionary and the argument list, returning the updated (settings, args)
    pair."""
    size_str = str(new_collar_size)
    set_option(args, '--sgm-collar-size', [size_str])
    settings['collar_size'] = [size_str]
    return (settings, args)
            
if __name__ == '__main__':
    # Usage text shown by argparse; the version string is appended to it.
    usage = '''parallel_stereo [options] <images> [<cameras>]
                  <output_file_prefix> [DEM]
        Extensions are automatically added to the output files.
        Camera model arguments may be optional for some stereo
        session types (e.g. isis). Stereo parameters should be
        set in the stereo.default file.\n''' + get_asp_version()

    # What makes this program different from stereo is that it
    # tries to treat ASP as a multi-process system instead of a
    # multi-threaded executable. This has benefits on the super
    # computer by allowing a single stereo pair use multiple
    # computers. It also allows us to get past the single-threaded
    # constraints of ISIS.

    # Algorithm: When the script is started, it starts one copy of
    # itself on each node if doing steps 1, 2, or 4 (corr, rfne, tri).
    # Those scripts in turn start actual jobs on those nodes.
    # For the other steps, the script does the work itself.

    # Python does not know how to disambiguate between two
    # options which start with a similar prefix, so reject --threads
    # before argparse sees it (wipe_option removes the option and
    # its single value from sys.argv in place).
    if '--threads' in sys.argv:
        print("Ignoring the option --threads. Use --threads-multiprocess " + \
              "and --threads-singleprocess.")
        asp_cmd_utils.wipe_option(sys.argv, '--threads', 1)

    p = argparse.ArgumentParser(usage=usage)
    p.add_argument('--nodes-list',           dest='nodes_list', default=None,
                   help='The list of computing nodes, one per line. ' + \
                   'If not provided, run on the local machine.')
    p.add_argument('--ssh',                  dest='ssh', default=None,
                   help='The path to the ssh program to use to connect to other nodes')
    p.add_argument('--processes',            dest='processes', default=None,
                   type=int, help='The number of processes to use per node.')
    p.add_argument('--threads-multiprocess', dest='threads_multi', default=None,
                   type=int, help='The number of threads to use per process when running multiple processes.')
    p.add_argument('--threads-singleprocess',dest='threads_single', default=None,
                   type=int,
                   help='The number of threads to use when running a single process (PPRC and FLTR).')
    p.add_argument('--corr-seed-mode',       dest='seed_mode', default=None,
                   help='Correlation seed strategy. See stereo_corr for options.',
                   type=int)
    p.add_argument('-e', '--entry-point',    dest='entry_point', default=0,
                   help='Stereo Pipeline entry point (an integer from 0-5).',
                   type=int)
    p.add_argument('--stop-point',           dest='stop_point',  default=6,
                   help='Stereo Pipeline stop point (an integer from 1-6). Stop ' + 
                   'before this step.',
                   type=int)
    p.add_argument('--job-size-w',           dest='job_size_w',  default=-1,
                   help='Pixel width of input image tile for a single process. ' + \
                   'Set automatically.',
                   type=int)
    p.add_argument('--job-size-h',           dest='job_size_h',  default=-1,
                   help='Pixel height of input image tile for a single process. ' + \
                     'Set automatically.',
                   type=int)
    p.add_argument('--sgm-collar-size',      dest='sgm_collar_size', default=-1,
                   help='The padding around each tile to process. Set automatically.',
                   type=int)
    p.add_argument('--resume-at-corr', dest='resume_at_corr', default=False, 
                   action='store_true',
                   help='Start at the correlation stage and skip recomputing the valid ' + \
                   'low and full-res disparities for that stage. Do not change ' + \
                   '--left-image-crop-win, etc., when running this.')
    p.add_argument('--prev-run-prefix', dest='prev_run_prefix', default=None,
                   help='Start at the triangulation stage while reusing the data from this prefix. The new run can use different cameras, bundle adjustment prefix, or bathy planes (if applicable). Do not change crop windows, as that would invalidate the run.')
    p.add_argument('--keep-only', dest='keep_only', default="all_combined",
                   help='If set to "all_combined", which is the default, at the end of a ' + \
                   'successful run combine the results from subdirectories into .tif ' + \
                   'files with the given output prefix, and delete the ' + \
                   'subdirectories. If set to "essential", keep only PC.tif and the ' + \
                   'files needed to recreate it (those ending with .exr, -L.tif, ' + \
                   '-F.tif). If set to "unchanged", keep the run directory as it ' + \
                   'is. For fine-grained control, specify a quoted list of suffixes of ' + \
                   'files to keep, such as ".exr .match -L.tif -PC.tif".')
    p.add_argument('--sparse-disp-options', dest='sparse_disp_options',
                   help='Options to pass directly to sparse_disp. Use quotes around ' + \
                   'this string.')
    p.add_argument('-v', '--version',        dest='version', default=False,
                   action='store_true', help='Display the version of software.')
    p.add_argument('-s', '--stereo-file',    dest='stereo_file', default='./stereo.default',
                   help='Explicitly specify the stereo.default file to use. [default: ./stereo.default]')
    p.add_argument('--verbose', dest='verbose', default=False, action='store_true',
                   help='Display the commands being executed.')
    p.add_argument('--parallel-options', dest='parallel_options', default='--sshdelay 0.2',
                   help='Options to pass directly to GNU Parallel.')
    # Internal variables below. These are hidden from the help text
    # (argparse.SUPPRESS) and are passed by this script to copies of itself.
    # The id of the tile to process, 0 <= tile_id < num_tiles.
    p.add_argument('--tile-id', dest='tile_id', default=None, type=int,
                   help=argparse.SUPPRESS)
    # Directory where the job is running
    p.add_argument('--work-dir', dest='work_dir', default=None,
                   help=argparse.SUPPRESS)
    # ISIS settings
    p.add_argument('--isisroot', dest='isisroot', default=None,
                   help=argparse.SUPPRESS)
    p.add_argument('--isisdata', dest='isisdata', default=None,
                   help=argparse.SUPPRESS)

    # Debug option
    p.add_argument('--dry-run', dest='dryrun', default=False, action='store_true',
                   help="Do not launch the jobs, only print the commands that should be run.")

    # NOTE(review): 'global' at module scope is a no-op; kept for clarity of intent.
    global opt
    # parse_known_args: options this script understands go to 'opt';
    # everything else (images, cameras, stereo options) stays in 'args'
    # and is forwarded to the stereo executables.
    (opt, args) = p.parse_known_args()
    args = clean_args(args)
    
    if opt.version:
        # This prints the version and exits the process.
        asp_system_utils.print_version_and_exit()

    if not args and not opt.version:
        p.print_help()
        die('\nERROR: Missing input files', code=2)

    # Ensure our 'parallel' is not out of date
    check_parallel_version()

    # --resume-at-corr only applies in the master process (tile_id is None).
    if opt.tile_id is None and opt.resume_at_corr:
        print("Resuming at the correlation stage.")
        opt.entry_point = Step.corr
        if opt.stop_point <= Step.corr:
            raise Exception('Cannot resume at correlation if --stop-point ' + \
                            'is set to stop before that.')

    if opt.threads_single is None:
        opt.threads_single = get_num_cpus()

    # If corr-seed-mode was not specified, read it from the file
    if opt.seed_mode is None:
        opt.seed_mode = parse_corr_seed_mode(opt.stereo_file)
    # If not set in the file either, use 1.
    if opt.seed_mode is None:
        opt.seed_mode = 1
    # Pass it to the subprocesses
    args.extend(['--corr-seed-mode', str(opt.seed_mode)])

    if os.path.exists(opt.stereo_file):
        args.extend(['--stereo-file', opt.stereo_file])

    if opt.tile_id is None:
        # When the script is started, set some options from the
        # environment which we will pass to the scripts we spawn
        # 1. Set the work directory
        opt.work_dir = os.getcwd()
        # 2. Set the ISIS settings if any
        if 'ISISROOT' in os.environ: opt.isisroot = os.environ['ISISROOT']
        if 'ISISDATA' in os.environ: opt.isisdata = os.environ['ISISDATA']
    else:
        # After the script spawns itself to nodes, it starts in the
        # home dir. Make it go to the right place.
        os.chdir(opt.work_dir)
        # Set the ISIS settings
        if opt.isisroot  is not None: os.environ['ISISROOT' ] = opt.isisroot
        if opt.isisdata is not None: os.environ['ISISDATA'] = opt.isisdata

    # This command needs to be run after we switch to the work directory,
    # hence no earlier than this point. stereo_parse echoes key=value pairs
    # which run_and_parse_output turns into a dict of lists of strings.
    sep = ","
    settings = run_and_parse_output("stereo_parse", args, sep, opt.verbose)
    out_prefix = settings['out_prefix'][0]

    # In the master process, need to create the list of nodes. Must happen
    # after we are in the work dir and have out_prefix. This ensures
    # the list is not in a temp dir of one of the nodes.
    if opt.tile_id is None and opt.nodes_list is not None:
        if not os.path.isfile(opt.nodes_list):
            die('\nERROR: No such nodes-list file: ' + opt.nodes_list, code=2)
        local_nodes_list = out_prefix + "-nodes-list.txt"
        mkdir_p(os.path.dirname(local_nodes_list))
        if opt.nodes_list != local_nodes_list:
            shutil.copy2(opt.nodes_list, local_nodes_list)
            opt.nodes_list = local_nodes_list
        # Point the spawned copies (which reuse sys.argv) at the local copy.
        asp_cmd_utils.wipe_option(sys.argv, '--nodes-list', 1)
        sys.argv.extend(['--nodes-list', opt.nodes_list])
        print("Nodes list: " + opt.nodes_list)
      
    # Padded tiles are needed if later we do blending 
    using_padded_tiles = use_padded_tiles(settings)

    # By default use 8 threads for SGM/MGM. Then use proportionately fewer
    # processes to not run out of RAM. Otherwise see the function
    # get_best_procs_threads().
    # TODO(oalexan1): This logic needs to be all in one place.
    # For ISIS needs to use more processes in triangulation.
    alg = stereo_alg_to_num(settings['stereo_algorithm'][0])
    if alg > VW_CORRELATION_BM and alg < VW_CORRELATION_OTHER:
        if opt.threads_multi is None:
            opt.threads_multi = 8
        if opt.processes is None:
            # Round to nearest: processes = round(num_cpus / threads_multi).
            num_cpus = get_num_cpus()
            opt.processes = int(float(num_cpus) / float(opt.threads_multi) + 0.5)
            if opt.processes <= 0:
                opt.processes = 1

    # NOTE(review): this looks unreachable, since -v exits earlier via
    # print_version_and_exit() — confirm before removing.
    if opt.version:
        args.append('-v')

    # Exactly one of the two being unset (negative) is an error; the sum
    # of the two boolean-to-int conversions equals 1 only in that case.
    if int(opt.job_size_h < 0) + int(opt.job_size_w < 0) == 1:
        raise Exception('Must set neither or both of --job-size-h and --job-size-w.')
        
    # If --job-size-h and --job-size-w are not set, set them depending on the algorithm
    localEpi = (settings['alignment_method'][0] == 'local_epipolar')
    if opt.job_size_h < 0:
        if alg >= VW_CORRELATION_OTHER or localEpi:
            # Local alignment needs small tiles
            opt.job_size_w = 512
            opt.job_size_h = 512
        elif alg == VW_CORRELATION_BM:
            # Regular block matching
            opt.job_size_w = 2048
            opt.job_size_h = 2048
        elif alg > VW_CORRELATION_BM:
            # SGM, MGM, etc.
            opt.job_size_w = 2048
            opt.job_size_h = 2048
    if opt.sgm_collar_size < 0:
        if alg >= VW_CORRELATION_OTHER or localEpi:
            # Local alignment
            opt.sgm_collar_size = 128
        elif alg == VW_CORRELATION_BM:
            # Regular block matching
            opt.sgm_collar_size = 0
        elif alg > VW_CORRELATION_BM:
            # SGM, MGM, etc.
            opt.sgm_collar_size = 256
    
    # Update the collar size in the settings and args
    (settings, args) = set_collar_size(opt.sgm_collar_size, args, settings)

    # Re-run stereo_parse with a separator that will not occur in WKT or
    # GeoTransform strings, so those multi-token values can be rejoined.
    sep2 = '--non-comma-separator--' # a string unlikely to be in the output
    georef=run_and_parse_output("stereo_parse", args, sep2, opt.verbose)
    georef["WKT"] = "".join(georef["WKT"])
    georef["GeoTransform"] = "".join(georef["GeoTransform"])

    print ('Using tiles (before collar addition) of ' + str(opt.job_size_w) + \
           ' x ' + str(opt.job_size_h) + ' pixels.')
    print('Using a collar (padding) for each tile of ' + settings['collar_size'][0] + \
          ' pixels.')
    
    # See if to resume at triangulation. This logic must happen after we figured
    # if we need padded tiles, otherwise the bookkeeping will be wrong.
    if opt.tile_id is None and opt.prev_run_prefix is not None:
        print("Starting at the triangulation stage while reusing a previous run.")
        opt.entry_point = Step.tri
        if opt.stop_point <= Step.tri:
            raise Exception('Cannot resume at triangulation if --stop-point ' + \
                            'is set to stop before that.')
        sym_link_prev_run(opt.prev_run_prefix, out_prefix)

        # Now that we have data, update the settings value
        settings = run_and_parse_output("stereo_parse", args, sep, opt.verbose)

        # Create the directory tree for this run
        subdirs = create_subproject_dirs(settings)

    # parallel_args is what we will pass to spawned copies of parallel_stereo,
    # while 'args' is what we pass to plain stereo.
    # NOTE(review): this is an alias of sys.argv, not a copy — subsequent
    # wipe_option/extend calls on parallel_args mutate sys.argv itself.
    parallel_args = sys.argv # shallow copy
    
    # Individual tools may use these options, but then they confuse parallel_stereo.
    # They are usually encountered in log files.
    if '--compute-point-cloud-center-only' in parallel_args:
      raise Exception('Option --compute-point-cloud-center-only is not ' + \
                      'supported. Use stereo_tri instead.')
    if '--compute-low-res-disparity-only' in parallel_args:
        raise Exception('Option --compute-low-res-disparity-only is not ' + \
                        'supported. Use stereo_corr instead.')

    # TODO(oalexan1): The giant block below needs to be broken up into
    # several functions named parent_run(), child_run(), and
    # multiview_run(). Careful testing will be needed.
    if opt.tile_id is None:

        # We get here when the script is started. The current running
        # process has become the management process that spawns other
        # copies of itself on other machines. This block will only do
        # actual work when we hit a non-multiprocess step like PPRC or FLTR.

        # Wipe options which we will override.
        asp_cmd_utils.wipe_option(parallel_args, '-e', 1)
        asp_cmd_utils.wipe_option(parallel_args, '--entry-point', 1)
        asp_cmd_utils.wipe_option(parallel_args, '--stop-point', 1)
        
        num_pairs = int(settings['num_stereo_pairs'][0])
        if num_pairs > 1:
            # Run multiview logic. 
            # TODO(oalexan1): This must be a function.
            if opt.keep_only == "all_combined":
                # Change the default to "unchanged"
                opt.keep_only = "unchanged"
            if opt.keep_only != "unchanged":
                # If the user specified something else
                raise Exception('Option --keep-only does not work with multiview stereo.')
            
            # Bugfix: avoid confusing the logic below
            asp_cmd_utils.wipe_option(parallel_args, '-s', 1)
            if os.path.exists(opt.stereo_file):
                parallel_args.extend(['--stereo-file', opt.stereo_file])

            # Map from option, say --job-size-w, to the variable name, say, job_size_w.
            opt2var = {}
            for x in p._actions:
                # For cases like ['-l', '--long'] get the second entry
                opt_str = getattr(x, 'option_strings')[-1]
                opt2var[opt_str] = getattr(x, 'dest')

            # Find the options used by parallel_stereo which are not
            # passed to the stereo executables. This is a bit fragile.
            extra_args = []
            option_dict = vars(opt) # Each option understood by python, and its parsed value

            for arg in parallel_args[1:]:

                if arg in args:
                    # This is an option passed to stereo
                    continue

                if len(arg) >= 2 and arg[0] == '-' and arg[1] != '-':
                    # We expect two dashes
                    raise Exception('Not implemented, failed to parse: ' + arg)

                if not arg in opt2var.keys():
                    # Does not start with dash (or is an option value,
                    # not an option name)
                    continue 

                var = opt2var[arg]

                if option_dict[var] is None:
                    # This option was not specified
                    continue

                if isinstance(option_dict[var], bool):
                    # This is a bool flag, append it with no value
                    extra_args.append(arg)
                    continue

                if (isinstance(option_dict[var], int)    or
                    isinstance(option_dict[var], float)  or
                    asp_string_utils.isString(option_dict[var])):
                    # For int, float, and string, append together with value
                    extra_args.append(arg)
                    extra_args.append(str(option_dict[var]))
                    continue

                # We cannot currently parse other options than int, string, float, and bool.
                # Should be simple to fix if need be. 
                raise Exception('Could not parse: ' + arg)

            if opt.entry_point < Step.tri:
                run_multiview(__file__, args, extra_args, opt.entry_point,
                              opt.stop_point, opt.verbose, settings)
                # Everything is done.
                sys.exit(0)
            else:
                # We will arrive here after this script invokes itself
                # for multiview. Set up the directories and run Step.tri.
                create_symlinks_for_multiview(settings, opt)

        # Parent script logic for each step. Each stage follows the same
        # pattern: skip if before the entry point, exit if at/past the
        # stop point, otherwise run locally (normal_run) or fan out to
        # the nodes (spawn_to_nodes).

        # Preprocessing
        step = Step.pprc
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()
            normal_run('stereo_pprc', args, msg='%d: Preprocessing' % step)
            create_subproject_dirs(settings) # symlink L.tif, etc
            # Now the left is defined. Regather the settings
            # and properly create the project dirs.
            settings = run_and_parse_output("stereo_parse", args, sep,
                                            opt.verbose)

        # Correlation
        step = Step.corr
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()

            # Do low-res correlation, this happens just once.
            calc_lowres_disp(args, opt, sep, resume = opt.resume_at_corr)

            # symlink D_sub, D_sub_spread, etc.
            create_subproject_dirs(settings)

            # Run full-res stereo using multiple processes.
            check_system_memory(opt, args, settings)
            parallel_args.extend(['--skip-low-res-disparity-comp'])
            spawn_to_nodes(step, settings, parallel_args)
            # Low-res disparity is done, so wipe that option
            asp_cmd_utils.wipe_option(parallel_args, '--skip-low-res-disparity-comp', 0)
            
            # Bugfix: When doing refinement for a given tile, we must see
            # the result of correlation for all tiles. To achieve that,
            # rename all correlation tiles to something else,
            # build the vrt of all correlation tiles, and sym link
            # that vrt from all tile directories.
            rename_files(settings, "-D.tif", "-Dnosym.tif")
            build_vrt('stereo_corr', settings, georef, "-D.tif", "-Dnosym.tif", 
                      contract_tiles = using_padded_tiles)
            create_subproject_dirs(settings) # symlink D.tif

        # Subpixel modes above 6 do their refinement as part of
        # correlation, so the stand-alone refinement stage is skipped.
        skip_refine_step = (int(settings['subpixel_mode'][0]) > 6)

        # Blending (when using local_epipolar alignment, or SGM/MGM, or external algorithms)
        step = Step.blend
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()
            
            if not using_padded_tiles:
                print("No work to do at the blending step.")
            else:
                create_subproject_dirs(settings)
                spawn_to_nodes(step, settings, parallel_args)

                if not skip_refine_step:
                    # Do the same trick as after stereo_corr
                    rename_files(settings, "-B.tif", "-Bnosym.tif")
                    build_vrt('stereo_blend', settings, georef, "-B.tif", "-Bnosym.tif", 
                              contract_tiles = False)
                    create_subproject_dirs(settings) # symlink B.tif

            # At the blending step, make a vrt from the per-tile lr-disp differences.
            if settings['save_lr_disp_diff'][0] != '0':
                if using_padded_tiles:
                    build_vrt('stereo_blend', settings, georef,
                            "-L-R-disp-diff.tif", "-L-R-disp-diff-blend.tif")
                else:
                    build_vrt('stereo_blend', settings, georef,
                            "-L-R-disp-diff.tif", "-L-R-disp-diff.tif")

        # Refinement
        step = Step.rfne
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()
            # If we used SGM/MGM or some other method making use of
            # tiles, need to read from the blend file.
            if using_padded_tiles:
                parallel_args.extend(['--subpix-from-blend'])
            if not skip_refine_step:
                create_subproject_dirs(settings)
                spawn_to_nodes(step, settings, parallel_args)

        # Filtering
        step = Step.fltr
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()
            try:
                build_vrt('stereo_rfne', settings, georef, "-RD.tif", "-RD.tif")
            except Exception as e:
                # Make the error message more informative
                raise Exception('Failed to build a VRT from */*RD.tif files. Must redo at least the refinement step. Additional error message: ' + str(e))
                
            normal_run('stereo_fltr', args, msg='%d: Filtering' % step)
            create_subproject_dirs(settings) # symlink F.tif

        # Triangulation
        # TODO(oalexan1): For stereo we do not need to pad the tiles, which should
        # be a nice speed up. But then must ensure both the padded and unpadded
        # tiles are wiped after the run is done.
        step = Step.tri
        subdirs = []
        if (opt.entry_point <= step):
            if (opt.stop_point <= step):
                sys.exit()

            # TODO(oalexan1): Must wipe the subdirs!
            if int(settings['correlator_mode'][0]) != 0:
                # Skip triangulation, but continue further down with cleanup.
                print("Skipping triangulation with --correlator-mode.")
            else:
                # Compute the cloud center. Done once per run.
                tmp_args = args[:] # deep copy
                tmp_args.append('--compute-point-cloud-center-only')
                normal_run('stereo_tri',  tmp_args, msg='%d: Triangulation' % step)
                # Point cloud center computation was done
                parallel_args.extend(['--skip-point-cloud-center-comp'])

                # Wipe options that should have been done when the
                # the center of point cloud was computed
                asp_cmd_utils.wipe_option(parallel_args, '--unalign-disparity', 0)
                asp_cmd_utils.wipe_option(parallel_args, '--num-matches-from-disparity', 1)
                asp_cmd_utils.wipe_option(parallel_args, '--num-matches-from-disp-triplets', 1)
                
                # symlink the files just created
                subdirs = create_subproject_dirs(settings)

                # Run triangulation on multiple machines
                spawn_to_nodes(step, settings, parallel_args)
                build_vrt('stereo_tri', settings, georef, "-PC.tif", "-PC.tif") # mosaic

        # If the run concluded successfully, merge and wipe
        if (opt.stop_point > Step.tri):
          if len(subdirs) == 0:
            # If this was not created by now, do it now
            subdirs = create_subproject_dirs(settings)
          keepOnlySpecified(opt.keep_only, out_prefix, subdirs)
       # End main process case
    else:

        # This process was spawned by GNU Parallel with a given
        # value of opt.tile_id. Launch the job for that tile.
        if opt.verbose:
            print("Running on machine: ", os.uname())

        try:
            # Pick the tile we want from the list of tiles
            tiles = produce_tiles(settings, opt.job_size_w, opt.job_size_h)
            tile  = tiles[opt.tile_id]

            # A spawned copy runs exactly one stage, given by its entry point.
            if (opt.entry_point == Step.corr):
                check_system_memory(opt, args, settings)
                tile_run('stereo_corr', args, settings, tile,
                         msg='%d: Correlation' % opt.entry_point)

            if (opt.entry_point == Step.blend):
                tile_run('stereo_blend', args, settings, tile,
                         msg='%d: Blending' % opt.entry_point)

            if (opt.entry_point == Step.rfne):
                tile_run('stereo_rfne', args, settings, tile,
                         msg='%d: Refinement' % opt.entry_point)

            if (opt.entry_point == Step.tri):
                tile_run('stereo_tri', args, settings, tile,
                         msg='%d: Triangulation' % opt.entry_point)

        except Exception as e:
            # die() reports the error and exits; the raise is a safety net
            # in case die() ever returns.
            die(e)
            raise
