Merge pull request #552 from girder/test-utility-scripts
Add some utility scripts that have been useful in testing.
Showing 3 changed files with 144 additions and 0 deletions.
@@ -0,0 +1,16 @@
********************************
Large Image Test Utility Scripts
********************************

This directory consists of files that were useful in testing various aspects of
large_image. These files are not part of the stable library interface nor part
of the regularly used testing code. They may not be maintained.

Some utilities:

- compression_test.py - Recompress input files with a wide set of options.
  Compute full statistics on each output file to determine the amount of loss
  introduced with those compression options when appropriate.

- compression_test_summary.py - Collect the results from the compression test
  and output a csv file.  Example invocations of both scripts are shown below.
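As a rough end-to-end sketch of how the two scripts might be driven together: the script names are taken from this README, while the input files, worker count, output directory, and csv path are made-up placeholders, and the sketch assumes it is run from this directory.

import os
import subprocess
import sys

# Placeholder paths; substitute real inputs and a writable output location.
inputs = ['sample1.tiff', 'sample2.tiff']
out_dir = '/tmp/compression_out'
csv_path = '/tmp/compression_results.csv'
os.makedirs(out_dir, exist_ok=True)

# Recompress every input with every option combination, using 4 workers.
subprocess.check_call(
    [sys.executable, 'compression_test.py', '4', out_dir] + inputs)

# Collect the statistics embedded in each output file into a single csv.
subprocess.check_call(
    [sys.executable, 'compression_test_summary.py', out_dir, csv_path])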
@@ -0,0 +1,80 @@
import concurrent.futures
import itertools
import os
import psutil
import subprocess
import sys

# Keep GDAL's .aux.xml side-car files in a scratch directory.
os.environ['GDAL_PAM_PROXY_DIR'] = '/tmp/gdal'

# Options added to every conversion command.
allOpts = ['-w', '--stats-full']
# Each compression option set is combined with each tile size option.
matrix = [[
    [],
    ['--compression', 'none'],
    ['--compression', 'jpeg'],
    ['--compression', 'jpeg', '-q', '95'],
    ['--compression', 'jpeg', '-q', '90'],
    ['--compression', 'jpeg', '-q', '80'],
    ['--compression', 'jpeg', '-q', '70'],
    ['--compression', 'deflate'],
    ['--compression', 'deflate', '--level', '1'],
    ['--compression', 'deflate', '--level', '9'],
    ['--compression', 'lzw'],
    ['--compression', 'lzw', '--predictor', 'none'],
    ['--compression', 'lzw', '--predictor', 'horizontal'],
    ['--compression', 'zstd'],
    ['--compression', 'zstd', '--level', '1'],
    ['--compression', 'zstd', '--level', '9'],
    ['--compression', 'zstd', '--level', '22'],
    ['--compression', 'packbits'],
    # ['--compression', 'jbig'],
    # ['--compression', 'lzma'],
    ['--compression', 'webp'],
    ['--compression', 'webp', '-q', '0'],
    ['--compression', 'webp', '-q', '100'],
    ['--compression', 'webp', '-q', '95'],
    ['--compression', 'webp', '-q', '90'],
    ['--compression', 'webp', '-q', '80'],
    ['--compression', 'webp', '-q', '70'],
    ['--compression', 'jp2k'],
    ['--compression', 'jp2k', '--psnr', '80'],
    ['--compression', 'jp2k', '--psnr', '70'],
    ['--compression', 'jp2k', '--psnr', '60'],
    ['--compression', 'jp2k', '--psnr', '50'],
    ['--compression', 'jp2k', '--psnr', '40'],
    ['--compression', 'jp2k', '--cr', '10'],
    ['--compression', 'jp2k', '--cr', '100'],
    ['--compression', 'jp2k', '--cr', '1000'],
], [
    [],  # default tile size of 256
    ['--tile', '512'],
    ['--tile', '1024'],
]]

if not len(sys.argv[1:]) or '--help' in sys.argv[1:]:
    print("""test_compression.py [(concurrency)] (output directory) (input file ...)""")
    sys.exit(0)
args = sys.argv[1:]
concurrency = psutil.cpu_count(logical=True)
if args[0].isdigit():
    concurrency = int(args[0])
    args = args[1:]
# Set to 1 to disable concurrency
pool = concurrent.futures.ThreadPoolExecutor(max_workers=concurrency)
tasks = []
# Queue one large_image_converter run per input file and option combination.
for input in args[1:]:
    root = os.path.join(args[0], os.path.basename(input))
    for optList in itertools.product(*matrix):
        opts = [opt for subList in optList for opt in subList]
        output = root + '.' + '.'.join(str(opt).strip('-') for opt in opts) + '.tiff'
        output = output.replace('..', '.')
        cmd = ['large_image_converter', input, output] + opts + allOpts
        tasks.append((cmd, pool.submit(subprocess.call, cmd)))
# Print each command line as its conversion finishes, in submission order.
while len(tasks):
    try:
        tasks[0][-1].result(0.1)
    except concurrent.futures.TimeoutError:
        continue
    cmd, task = tasks.pop(0)
    print(' '.join(cmd))
pool.shutdown(False)
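To make the output naming concrete, here is the filename construction from the loop above applied to one hypothetical combination ('slide.svs' and the 'out' directory are made up; the logic is copied from the script):

import os

root = os.path.join('out', os.path.basename('slide.svs'))
opts = ['--compression', 'jpeg', '-q', '95', '--tile', '512']

# Leading dashes are stripped from each option token and the pieces joined with dots.
output = root + '.' + '.'.join(str(opt).strip('-') for opt in opts) + '.tiff'
output = output.replace('..', '.')
print(output)  # out/slide.svs.compression.jpeg.q.95.tile.512.tiff

With an empty option list the doubled dot collapses, leaving just out/slide.svs.tiff.  The '.compression' and '.tile' substrings embedded in these names are what the summary script below splits on to recover the source file name.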
@@ -0,0 +1,48 @@
import json
import os
import pandas
import sys
import tifftools

if not len(sys.argv[1:]) or '--help' in sys.argv[1:]:
    print("""test_compression_summary.py (directory) (csv output file)
Check each file in the specified directory and gather appropriate statistics
into csv output.""")
    sys.exit(0)
results = []
for file in os.listdir(sys.argv[1]):
    path = os.path.join(sys.argv[1], file)
    entry = {'file': file}
    # Recover the original source name from the test output filename.
    if '.compression' in file:
        entry['source'] = file.split('.compression')[0]
    elif '.tile' in file:
        entry['source'] = file.split('.tile')[0]
    else:
        entry['source'] = file.split('.tiff')[0]
    entry['error'] = None
    results.append(entry)
    try:
        info = tifftools.read_tiff(path)
    except Exception:
        entry['error'] = "Can't read"
        print(entry)
        continue
    # The converter stores its arguments and statistics as JSON in ImageDescription.
    try:
        details = json.loads(info['ifds'][0]['tags'][tifftools.Tag.ImageDescription.value]['data'])
    except Exception:
        entry['error'] = "Can't parse ImageDescription"
        print(entry)
        continue
    entry.update(details['large_image_converter']['arguments'])
    try:
        entry.update({
            k if k != 'psnr' else 'stats_psnr': v
            for k, v in details['large_image_converter']['conversion_stats'].items()})
    except Exception:
        entry['error'] = 'No conversion stats'
        print(entry)
        continue
    print(entry)
df = pandas.DataFrame.from_dict(results)
df.to_csv(sys.argv[2], index=False)
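A brief sketch of inspecting the resulting csv with pandas.  Only the file, source, and error columns are created unconditionally by the script above; everything else mirrors the converter's recorded arguments and conversion_stats, so the stats_psnr column and the csv path here are assumptions.

import pandas

df = pandas.read_csv('/tmp/compression_results.csv')  # path is a placeholder

# Count, per source image, how many outputs could not be read or parsed.
print(df.groupby('source')['error'].apply(lambda s: s.notna().sum()))

# If a psnr statistic was recorded, show the highest-psnr outputs per source.
if 'stats_psnr' in df.columns:
    ok = df[df['error'].isna()]
    print(ok.sort_values('stats_psnr', ascending=False)
            .groupby('source').head(3)[['file', 'stats_psnr']])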