Add test scripts to run tests via example application.

Gunther H. Weber 2020-09-24 18:44:44 -07:00
parent 7a77b1ad89
commit 8b21a8e07d
7 changed files with 363 additions and 2 deletions

@@ -76,4 +76,13 @@ if (VTKm_ENABLE_MPI)
if (TARGET vtkm::tbb)
target_compile_definitions(ContourTree_Distributed PRIVATE "ENABLE_SET_NUM_THREADS")
endif()
add_executable(TreeCompiler TreeCompilerApp.cxx)
target_link_libraries(TreeCompiler vtkm_filter)
vtkm_add_target_information(TreeCompiler DROP_UNUSED_SYMBOLS)
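# Copy the test scripts into the build directory so they can run next to the executables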
configure_file(split_data_2d.py split_data_2d.py COPYONLY)
configure_file(split_data_3d.py split_data_3d.py COPYONLY)
configure_file(hact_test.sh hact_test.sh COPYONLY)
configure_file(testrun.sh testrun.sh COPYONLY)
endif()

@@ -0,0 +1,50 @@
//=======================================================================================
//
// Parallel Peak Pruning v. 2.0
//
// Started June 15, 2017
//
// Copyright Hamish Carr, University of Leeds
//
// TreeCompilerApp.cxx - main routine for the external verification programme
//
//=======================================================================================
//
// COMMENTS:
//
// Just a harness for the TreeCompiler routines
//
//=======================================================================================
#include <stdio.h>
#include <stdlib.h>
#include <vtkm/worklet/contourtree_distributed/TreeCompiler.h>
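// Usage: TreeCompiler <file1.dat> [<file2.dat> ...]
// where each argument is a binary tree dump written by TreeCompiler::WriteBinary
// (e.g. the TreeCompilerOutput_*.dat files produced by the distributed test run)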
// main routine
int main(int argc, char** argv)
{ // main()
  // the compiler for putting them together
  vtkm::worklet::contourtree_distributed::TreeCompiler compiler;

  // we just loop through the arguments, reading them in and adding them
  for (int argument = 1; argument < argc; argument++)
  { // per argument
    // open the input file for binary reading, choking on a bad filename
    FILE* inFile = fopen(argv[argument], "rb");
    if (inFile == NULL)
    { // bad filename
      printf("Bad filename %s\n", argv[argument]);
      return EXIT_FAILURE;
    } // bad filename

    // read and append
    compiler.ReadBinary(inFile);
  } // per argument

  // now compile and print
  compiler.ComputeSuperarcs();
  compiler.PrintSuperarcs(true);
  return EXIT_SUCCESS;
} // main()

@@ -0,0 +1,39 @@
#!/bin/sh
GTCT_DIR=${GTCT_DIR:-${HOME}/devel/parallel-peak-pruning/ContourTree/SweepAndMergeSerial/out}
RED=""
GREEN=""
NC=""
if [ -t 1 ]; then
    # If stdout is a terminal, color Pass and FAIL green and red, respectively
    RED=$(tput setaf 1)
    GREEN=$(tput setaf 2)
    NC=$(tput sgr0)
fi
echo "Removing previously generated files"
rm -f *.log *.dat
echo "Copying target file $1 into current directory"
filename=${1##*/}
fileroot=${filename%.txt}
cp "$1" "${filename}"
echo "Splitting data into "$2" x "$2" parts"
./split_data_2d.py ${filename} $2
rm ${filename}
echo "Running HACT"
n_parts=$(($2*$2))
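# Print the command line for reference, then run it with 4 MPI ranks over ${n_parts} blocks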
echo mpirun -np 4 ./ContourTree_Distributed -d Any --numBlocks=${n_parts} ${fileroot}_part_%d_of_${n_parts}.txt
mpirun -np 4 ./ContourTree_Distributed -d Any --numBlocks=${n_parts} ${fileroot}_part_%d_of_${n_parts}.txt
rm ${fileroot}_part_*_of_${n_parts}.txt
echo "Compiling Outputs"
./TreeCompiler TreeCompilerOutput_*.dat | sort > outsort${fileroot}_$2x$2.txt
echo "Diffing"
diff outsort${fileroot}_$2x$2.txt ${GTCT_DIR}/outsort${fileroot}.txt
if test $? -eq 0; then echo "${GREEN}Pass${NC}"; rm outsort${fileroot}_$2x$2.txt; else echo "${RED}FAIL${NC}"; fi;
# echo "Generating Dot files"
# ./makedot.sh

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
import numpy as np
import math
import os
import sys
# Read a 2D text file from disk into a NumPy array
def read_file(fn):
    data = np.fromfile(fn, dtype=int, sep=" ")
    data = data[2:].reshape(tuple(data[0:2]))
    return data
# Save a block from a 2D NumPy array to disk
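# Each piece starts with header lines recording the global extents, this
# block's offset, and the blocks-per-dim split, followed by the block size
# and the data values themselves.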
def save_piece(fn, array, offset, n_blocks, size):
    with open(fn, 'w') as f:
        f.write('#GLOBAL_EXTENTS ' + ' '.join(map(str, array.shape)) + '\n')
        f.write('#OFFSET ' + ' '.join(map(str, offset)) + '\n')
        f.write('#BLOCKS_PER_DIM ' + ' '.join(map(str, n_blocks)) + '\n')
        f.write(' '.join(map(str, size)) + '\n')
        np.savetxt(f, array[offset[0]:offset[0]+size[0], offset[1]:offset[1]+size[1]], fmt='%.16g')
# Compute split points for splitting into n blocks
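# Note: block i ends on the same grid line where block i+1 starts, so
# neighboring blocks share a one-cell-wide boundary layer.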
def split_points(shape, nblocks):
    dx = float(shape - 1) / nblocks
    return [math.floor(i * dx) for i in range(nblocks)] + [shape - 1]
if len(sys.argv) < 2:
    print("Error: Usage: split_data_2d.py <filename> [<n_blocks_per_axis>|<n_blocks_x> <n_blocks_y>]", file=sys.stderr)
    sys.exit(1)
# Parse parameters
in_filename = sys.argv[1]
n_blocks = (2, 2)
if len(sys.argv) > 2:
    if len(sys.argv) >= 4:
        n_blocks = (int(sys.argv[2]), int(sys.argv[3]))
    else:
        n_blocks = (int(sys.argv[2]), int(sys.argv[2]))
name, ext = os.path.splitext(in_filename)
out_filename_pattern = name + '_part_%d_of_' + str(n_blocks[0]*n_blocks[1]) + ext
# Read data
data = read_file(in_filename)
# Compute split points
split_points_x = split_points(data.shape[0], n_blocks[0])
split_points_y = split_points(data.shape[1], n_blocks[1])
# Save blocks
block_no = 0
for x_start, x_stop in zip(split_points_x, split_points_x[1:]):
    for y_start, y_stop in zip(split_points_y, split_points_y[1:]):
        n_x = x_stop - x_start + 1
        n_y = y_stop - y_start + 1
        save_piece(out_filename_pattern % block_no, data, (x_start, y_start), n_blocks, (n_x, n_y))
        # print("Wrote block %d, origin %d %d, size %d %d" % (block_no, x_start, y_start, n_x, n_y))
        block_no += 1

@@ -0,0 +1,100 @@
#!/usr/bin/env python3
import numpy as np
import math
import os
import sys
# For readBOV
from functools import reduce
import operator
# Read a 3D text file from disk into a NumPy array
# ... Plain text
def read_file(fn):
    data = np.fromfile(fn, dtype=int, sep=" ")
    data = data[3:].reshape((data[2], data[0], data[1]))
    return data
# ... VisIt BOV
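# Minimal header parser: expects 'KEY: VALUE' lines and uses DATA_SIZE,
# DATA_FORMAT, DATA_FILE, CENTERING, VARIABLE, and (optionally) DATA_ENDIAN.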
def readBOV(filename):
    with open(filename, 'r') as f:
        header = dict([(lambda x: (x[0].strip().lower(), x[1].strip()))(l.strip().split(':')) for l in f.readlines()])
        if 'data_endian' in header:
            if header['data_endian'].lower() != sys.byteorder:
                print('Unsupported endianness ' + header['data_endian'].lower())
                return None
        shape = tuple([int(x) for x in header['data_size'].split(' ')])
        count = reduce(operator.mul, shape, 1)
        dtype_map = {'float': 'float32', 'double': 'float64', 'char': 'uint8'}
        dtype = np.dtype(dtype_map[header['data_format'].lower()])
        dataname = os.path.realpath(os.path.join(os.path.dirname(filename), header['data_file']))
        if 'variable' not in header:
            header['variable'] = 'val'
        return (header['variable'], header['centering'].lower(), np.fromfile(dataname, dtype, count).reshape(tuple(reversed(shape))))
    return None
# Save a block from a 3D NumPy array to disk
# Python order is slice, row, col
# TXT file order is row, col, slice
# offset and size are in file order
def save_piece(fn, array, offset, n_blocks, size):
    with open(fn, 'w') as f:
        perm = [1, 2, 0]
        f.write('#GLOBAL_EXTENTS ' + ' '.join(map(str, [array.shape[i] for i in perm])) + '\n')
        f.write('#OFFSET ' + ' '.join(map(str, offset)) + '\n')
        f.write('#BLOCKS_PER_DIM ' + ' '.join(map(str, n_blocks)) + '\n')
        f.write(' '.join(map(str, size)) + '\n')
        if fn.endswith('.bdem'):
            array[offset[2]:offset[2]+size[2], offset[0]:offset[0]+size[0], offset[1]:offset[1]+size[1]].astype(np.double).tofile(f)
        else:
            for s in range(offset[2], offset[2]+size[2]):
                np.savetxt(f, array[s, offset[0]:offset[0]+size[0], offset[1]:offset[1]+size[1]], fmt='%.16g')
                f.write('\n')
# Compute split points for splitting into n blocks
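# As in split_data_2d.py, consecutive blocks share a one-cell-wide boundary layer.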
def split_points(shape, nblocks):
    dx = float(shape - 1) / nblocks
    return [math.floor(i * dx) for i in range(nblocks)] + [shape - 1]
if len(sys.argv) < 3:
    print("Error: Usage: split_data_3d.py <filename> <outfilepattern> [<n_blocks_per_axis>|<n_blocks_x> <n_blocks_y> <n_blocks_z>]", file=sys.stderr)
    sys.exit(1)
# Parse parameters
in_filename = sys.argv[1]
name, ext = os.path.splitext(in_filename)
#out_filename_pattern = name + '_split_%d.txt'
out_filename_pattern = sys.argv[2]
n_blocks = (2, 2, 2)
if len(sys.argv) > 3:
    if len(sys.argv) >= 6:
        n_blocks = (int(sys.argv[3]), int(sys.argv[4]), int(sys.argv[5]))
    else:
        n_blocks = (int(sys.argv[3]), int(sys.argv[3]), int(sys.argv[3]))
# Read data
if ext == '.bov':
    data = readBOV(in_filename)[2]
else:
    data = read_file(in_filename)
# Python order is slice, row, col
# Compute split points
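# n_blocks[0] splits the slice axis, n_blocks[1] the rows, and n_blocks[2] the columns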
split_points_s = split_points(data.shape[0], n_blocks[0])
split_points_r = split_points(data.shape[1], n_blocks[1])
split_points_c = split_points(data.shape[2], n_blocks[2])
# Name of the file that records the slice values
slice_filename = name + '_slices.txt'
# Save blocks
block_no = 0
for s_start, s_stop in zip(split_points_s, split_points_s[1:]):
    for r_start, r_stop in zip(split_points_r, split_points_r[1:]):
        for c_start, c_stop in zip(split_points_c, split_points_c[1:]):
            n_s = s_stop - s_start + 1
            n_r = r_stop - r_start + 1
            n_c = c_stop - c_start + 1
            # Blocks-per-dim permuted to file order (row, col, slice) to match offset and size
            save_piece(out_filename_pattern % block_no, data, (r_start, c_start, s_start), (n_blocks[1], n_blocks[2], n_blocks[0]), (n_r, n_c, n_s))
            block_no += 1

@@ -0,0 +1,99 @@
#!/bin/sh
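# Runs hact_test.sh on a series of 2D test data sets. Point DATA_DIR at the
# directory holding the .txt inputs; the default assumes a checkout of the
# parallel-peak-pruning data alongside this build.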
mkdir -p out
DATA_DIR=${DATA_DIR:-${HOME}/devel/parallel-peak-pruning/Data/2D}
if [ ! -d "$DATA_DIR" ]; then
    echo "Error: Directory $DATA_DIR does not exist!"
    exit 1
fi
echo
echo "Starting Timing Runs"
echo
echo "8x9 Test Set"
./hact_test.sh $DATA_DIR/8x9test.txt 2
#./hact_test.sh $DATA_DIR/8x9test.txt 4
# ./hact_test.sh $DATA_DIR/8x9test.txt 8
echo
echo "Vancouver Test Set"
./hact_test.sh $DATA_DIR/vanc.txt 2
#./hact_test.sh $DATA_DIR/vanc.txt 4
# ./hact_test.sh $DATA_DIR/vanc.txt 8
# ./hact_test.sh $DATA_DIR/vanc.txt 16
echo
echo "Vancouver SWSW Test Set"
./hact_test.sh $DATA_DIR/vancouverSWSW.txt 2
./hact_test.sh $DATA_DIR/vancouverSWSW.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSWSW.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSWSW.txt 16
echo
echo "Vancouver SWNW Test Set"
./hact_test.sh $DATA_DIR/vancouverSWNW.txt 2
./hact_test.sh $DATA_DIR/vancouverSWNW.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSWNW.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSWNW.txt 16
echo
echo "Vancouver SWSE Test Set"
./hact_test.sh $DATA_DIR/vancouverSWSE.txt 2
./hact_test.sh $DATA_DIR/vancouverSWSE.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSWSE.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSWSE.txt 16
echo
echo "Vancouver SWNE Test Set"
./hact_test.sh $DATA_DIR/vancouverSWNE.txt 2
./hact_test.sh $DATA_DIR/vancouverSWNE.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSWNE.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSWNE.txt 16
echo
echo "Vancouver NE Test Set"
./hact_test.sh $DATA_DIR/vancouverNE.txt 2
./hact_test.sh $DATA_DIR/vancouverNE.txt 4
# ./hact_test.sh $DATA_DIR/vancouverNE.txt 8
# ./hact_test.sh $DATA_DIR/vancouverNE.txt 16
echo
echo "Vancouver NW Test Set"
./hact_test.sh $DATA_DIR/vancouverNW.txt 2
./hact_test.sh $DATA_DIR/vancouverNW.txt 4
# ./hact_test.sh $DATA_DIR/vancouverNW.txt 8
# ./hact_test.sh $DATA_DIR/vancouverNW.txt 16
echo
echo "Vancouver SE Test Set"
./hact_test.sh $DATA_DIR/vancouverSE.txt 2
./hact_test.sh $DATA_DIR/vancouverSE.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSE.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSE.txt 16
echo
echo "Vancouver SW Test Set"
./hact_test.sh $DATA_DIR/vancouverSW.txt 2
./hact_test.sh $DATA_DIR/vancouverSW.txt 4
# ./hact_test.sh $DATA_DIR/vancouverSW.txt 8
# ./hact_test.sh $DATA_DIR/vancouverSW.txt 16
echo
echo "Icefields Test Set"
./hact_test.sh $DATA_DIR/icefield.txt 2
./hact_test.sh $DATA_DIR/icefield.txt 4
# ./hact_test.sh $DATA_DIR/icefield.txt 8
# ./hact_test.sh $DATA_DIR/icefield.txt 16
# ./hact_test.sh $DATA_DIR/icefield.txt 32
# ./hact_test.sh $DATA_DIR/icefield.txt 64
echo
echo "GTOPO30 Full Tiny Test Set"
./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 2
./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 4
# ./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 8
# ./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 16
# ./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 32
# ./hact_test.sh $DATA_DIR/gtopo_full_tiny.txt 64
echo
echo "GTOPO30 UK Tile Test Set"
./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 2
./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 4
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 8
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 16
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 32
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 64
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 128
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 256
# ./hact_test.sh $DATA_DIR/gtopo30w020n40.txt 512
echo "Done"

@@ -193,7 +193,7 @@ public:
static void PrintSuperarcArray(const std::vector<Edge>& superarc_array);
// routine to print the superarcs
void PrintSuperarcs() const;
void PrintSuperarcs(bool printHeader = false) const;
// routine to write out binary file
void WriteBinary(FILE* outFile) const;
@@ -380,8 +380,14 @@ inline void TreeCompiler::PrintSuperarcArray(const std::vector<Edge>& superarc_a
} // TreeCompiler::PrintSuperarcArray()
inline void TreeCompiler::PrintSuperarcs() const
inline void TreeCompiler::PrintSuperarcs(bool printHeader) const
{
  if (printHeader)
  {
    std::cout << "============" << std::endl;
    std::cout << "Contour Tree" << std::endl;
  }
  PrintSuperarcArray(this->superarcs);
}