
first working version

master
Holger Frey, 6 years ago
commit aa050f339f
  1. .gitignore (+72)
  2. Makefile (+63)
  3. Pillow help.md (+85)
  4. mtor/__init__.py (+21)
  5. mtor/dataproc.py (+430)
  6. mtor/filesort.py (+52)
  7. mtor/imageproc.py (+122)
  8. mtor/luts.py (+770)
  9. mtor/prescan.py (+163)
  10. pyproject.toml (+13)
  11. test.py (+316)
  12. test_mtor.py (+9)

.gitignore (+72 lines)

@@ -0,0 +1,72 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# test data
mtor-bilder/
oringinal-daten/
# C extensions
*.so
# Virtual environments
env/
.env/
venv/
.venv/
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
pip-wheel-metadata/
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# OS specific stuff
.DS_Store

Makefile (+63 lines)

@@ -0,0 +1,63 @@
.PHONY: clean clean-test clean-pyc clean-build help
.DEFAULT_GOAL := help
define BROWSER_PYSCRIPT
import os, webbrowser, sys
try:
from urllib import pathname2url
except:
from urllib.request import pathname2url
webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
endef
export BROWSER_PYSCRIPT
define PRINT_HELP_PYSCRIPT
import re, sys
for line in sys.stdin:
match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
if match:
target, help = match.groups()
print("%-20s %s" % (target, help))
endef
export PRINT_HELP_PYSCRIPT
BROWSER := python -c "$$BROWSER_PYSCRIPT"
help:
@python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
clean-build: ## remove build artifacts
rm -fr build/
rm -fr dist/
rm -fr .eggs/
find . -name '*.egg-info' -exec rm -fr {} +
find . -name '*.egg' -exec rm -f {} +
clean-pyc: ## remove Python file artifacts
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test: ## remove test and coverage artifacts
rm -fr .tox/
rm -f .coverage
rm -fr htmlcov/
lint: ## check style with flake8
black mtor
flake8 mtor
test: ## run tests quickly with the default Python
py.test -x --disable-warnings
coverage: ## check code coverage with the default Python
coverage run --source mtor -m pytest
coverage report -m
coverage html
$(BROWSER) htmlcov/index.html

Pillow help.md (+85 lines)

@@ -0,0 +1,85 @@
Image Class
-----------
convert(self, mode=None, matrix=None, dither=None, palette=0, colors=256)
Returns a converted copy of this image. For the "P" mode, this
method translates pixels through the palette. If mode is
omitted, a mode is chosen so that all information in the image
and the palette can be represented without a palette.
The current version supports all possible conversions between
"L", "RGB" and "CMYK." The **matrix** argument only supports "L"
and "RGB".
crop(self, box=None)
Returns a rectangular region from this image. The box is a
4-tuple defining the left, upper, right, and lower pixel
coordinate. See :ref:`coordinate-system`.
Note: Prior to Pillow 3.4.0, this was a lazy operation.
:param box: The crop rectangle, as a (left, upper, right, lower)-tuple.
:rtype: :py:class:`~PIL.Image.Image`
:returns: An :py:class:`~PIL.Image.Image` object.
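For example, cropping the 250 x 250 pixel region used elsewhere in this commit
(coordinates taken from mtor/__init__.py; the file name is a placeholder):
>>> from PIL import Image
>>> im = Image.open("frame.tif")
>>> roi = im.crop((660, 50, 910, 300))  # (left, upper, right, lower)
>>> roi.size
(250, 250)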
getextrema(self)
Gets the minimum and maximum pixel values for each band in
the image.
:returns: For a single-band image, a 2-tuple containing the
minimum and maximum pixel value. For a multi-band image,
a tuple containing one 2-tuple for each band.
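A short sketch for a single-band image (placeholder file name; the values shown
are only an example):
>>> from PIL import Image
>>> Image.open("frame.tif").getextrema()
(309, 23922)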
histogram(self, mask=None, extrema=None)
Returns a histogram for the image. The histogram is returned as
a list of pixel counts, one for each pixel value in the source
image. If the image has more than one band, the histograms for
all bands are concatenated (for example, the histogram for an
"RGB" image contains 768 values).
save(self, fp, format=None, **params)
Saves this image under the given filename. If no format is
specified, the format to use is determined from the filename
extension, if possible.
Keyword options can be used to provide additional instructions
to the writer. If a writer doesn't recognise an option, it is
silently ignored. The available options are described in the
:doc:`image format documentation
<../handbook/image-file-formats>` for each writer.
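Two illustrative calls (placeholder file names):
>>> from PIL import Image
>>> im = Image.open("frame.tif").convert("RGB")
>>> im.save("frame.png")              # format inferred from the extension
>>> im.save("frame.jpg", quality=85)  # JPEG-specific keyword option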
show(self, title=None, command=None)
Displays this image. This method is mainly intended for
debugging purposes.
On Unix platforms, this method saves the image to a temporary
PPM file, and calls the **display**, **eog** or **xv**
utility, depending on which one can be found.
On macOS, this method saves the image to a temporary PNG file, and
opens it with the native Preview application.
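For example (placeholder file name):
>>> from PIL import Image
>>> Image.open("frame.png").show()  # Preview on macOS, display/eog/xv on Unix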
ImageOps Module
---------------
The ImageOps module contains a number of ‘ready-made’ image processing
operations. This module is somewhat experimental, and most operators only work
on L and RGB images.
Only bug fixes have been added since the Pillow fork.
New in version 1.1.3.
PIL.ImageOps.autocontrast(image, cutoff=0, ignore=None)
Maximize (normalize) image contrast. This function calculates a histogram
of the input image, removes cutoff percent of the lightest and darkest
pixels from the histogram, and remaps the image so that the darkest pixel
becomes black (0), and the lightest becomes white (255).
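A minimal sketch (the cutoff of 2 percent is chosen arbitrarily):
>>> from PIL import Image, ImageOps
>>> im = Image.open("frame.tif").convert("L")
>>> stretched = ImageOps.autocontrast(im, cutoff=2)  # clip 2% darkest/lightest pixels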
PIL.ImageOps.equalize(image, mask=None)
Equalize the image histogram. This function applies a non-linear mapping to
the input image, in order to create a uniform distribution of grayscale
values in the output image.
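A matching sketch (again with a placeholder grayscale image):
>>> from PIL import Image, ImageOps
>>> im = Image.open("frame.tif").convert("L")
>>> equalized = ImageOps.equalize(im)  # roughly uniform gray-value distribution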

mtor/__init__.py (+21 lines)

@@ -0,0 +1,21 @@
import warnings
from .prescan import prescan_workflow
from .imageproc import image_workflow
from .dataproc import data_workflow, cached_workflow
from .filesort import filesort_workflow
warnings.filterwarnings("ignore")
def run():
#settings = prescan_workflow("mtor-bilder", 50, 910, 300, 660)
settings = prescan_workflow("mtor-bilder", 50, 725+150, 300, 725)
stats_results = image_workflow(settings)
selected_files = data_workflow(stats_results, settings)
filesort_workflow(selected_files, settings)
def run_data():
selected_files, settings = cached_workflow("mtor-bilder")
filesort_workflow(selected_files, settings)

mtor/dataproc.py (+430 lines)

@@ -0,0 +1,430 @@
import numpy
import pandas
import pathlib
import peakutils
import pickle
import seaborn
import matplotlib.pyplot as pyplot
from peakutils.plot import plot as peakplot
from scipy.signal import savgol_filter
from .imageproc import STATS_FUNCS, LEFT_GUARD, ROI_NAME, RIGHT_GUARD
def peakplot_iloc(x, y, ind):
"""
Plots the original data with the peaks that were identified
The original peakutils.plot function imported as "peakplot" has a little
bug, when displaying dataframes with some values removed. The
implementation below fixes this.
Parameters
----------
x : array-like
Data on the x-axis
y : array-like
Data on the y-axis
ind : array-like
Indexes of the identified peaks
"""
pyplot.plot(x, y, "--")
pyplot.plot(
x.iloc[ind],
y.iloc[ind],
"r+",
ms=5,
mew=2,
label="{} peaks".format(len(ind)),
)
pyplot.legend()
def set_plotting_styles():
seaborn.set_style("darkgrid")
seaborn.set_style(
"ticks",
{
"legend.frameon": True,
"xtick.direction": "in",
"ytick.direction": "in",
"axes.linewidth": 2,
},
)
seaborn.set(rc={"figure.figsize": (12, 9)})
seaborn.set_context("talk")
def init_pandas_data_frame():
columns = ["file", "frame"]
for prefix in (LEFT_GUARD, ROI_NAME, RIGHT_GUARD):
for key in STATS_FUNCS:
columns.append(f"{prefix}.{key}")
return pandas.DataFrame(columns=columns)
def construct_data_frame(stats_results):
data_frame = init_pandas_data_frame()
data_frame = data_frame.append(stats_results, ignore_index=True)
return data_frame.sort_values(by=["frame"]).reset_index(drop=True)
def find_guard_value(data_frame, settings):
left_avg = data_frame["left.average"]
right_avg = data_frame["right.average"]
guards_avg = left_avg.append(right_avg, ignore_index=True)
guard_data = numpy.histogram(
guards_avg, bins=settings.guard_histogram_bins
)
guard_counts = guard_data[0].astype(numpy.float16)
guard_edges = guard_data[1][1:] # edges enclose the counts
pyplot.clf()
seaborn.lineplot(x=guard_edges, y=guard_counts)
pyplot.title("Histogram of Guard Avarages (not filtered)")
pyplot.xlabel("Average Intensity [au]")
pyplot.ylabel("Number of Observations [1]")
path = settings.data_dir / "1-histogram-of-guard-avarages-not-filtered.png"
pyplot.savefig(str(path))
guard_counts_filtered = savgol_filter(
guard_counts,
settings.guard_filter_window,
settings.guard_filter_polynom,
)
pyplot.clf()
seaborn.lineplot(x=guard_edges, y=guard_counts_filtered)
pyplot.title("Histogram of Guard Avarages (Savitzky-Golay filter)")
pyplot.xlabel("Average Intensity [au]")
pyplot.ylabel("Number of Observations [1]")
path = settings.data_dir / "2-histogram-of-guard-avarages-filtered.png"
pyplot.savefig(str(path))
# Finding the first minima after the first peak
# In a first step, the location of the first peak is determined.
# Since the algorithm only finds peaks, we need to inverse the data points
# and start after the first peak to find the first minima.
indexes = peakutils.indexes(guard_counts_filtered)
first_peak_position = indexes[0]
# looks like magic, but is standard numpy behaviour
inverted_series = max(guard_counts_filtered) - guard_counts_filtered
indexes = peakutils.indexes(
inverted_series[first_peak_position:],
min_dist=settings.guards_minima_min_dist,
)
# since we shortened the data, we need to add the first peak position
first_minima_position = indexes[0] + first_peak_position
settings.guard_max_value = guard_edges[first_minima_position]
pyplot.clf()
peakplot(guard_edges, guard_counts_filtered, [first_minima_position])
pyplot.title(
(
f"Histogram of Guard Avarages (Savitzky-Golay filter),"
f" first minima at {int(settings.guard_max_value)} au"
)
)
pyplot.xlabel("Average Intensity [au]")
pyplot.ylabel("Number of Observations [1]")
path = (
settings.data_dir
/ "3-histogram-of-guard-avarages-filtered-with-first-minima.png"
)
pyplot.savefig(str(path))
def check_guards(data_frame, settings):
for what in (LEFT_GUARD, RIGHT_GUARD):
ok_col = f"{what}.ok"
data_frame[ok_col] = False
mask = data_frame[f"{what}.average"] < settings.guard_max_value
data_frame.loc[mask, ok_col] = True
data_frame["guards.ok"] = (
data_frame[f"{LEFT_GUARD}.ok"] & data_frame[f"{RIGHT_GUARD}.ok"]
)
mask = data_frame["guards.ok"] == True
guarded_df = data_frame[mask].copy()
pyplot.clf()
ax = seaborn.scatterplot(
x="frame",
y=f"{ROI_NAME}.average",
data=data_frame,
hue="guards.ok",
hue_order=[True, False],
palette={True: "b", False: "r"},
)
pyplot.title("Selection based on guard values")
pyplot.ylabel("Average Intensity [au]")
pyplot.ylabel("Frame Number [1]")
path = settings.data_dir / "4-image-selection-based-on-guard-values.png"
pyplot.savefig(str(path))
settings.charts_y_limit = ax.get_ylim()
pyplot.clf()
ax = seaborn.scatterplot(
x="frame", y=f"{ROI_NAME}.average", data=guarded_df
)
count_all_images = len(data_frame)
count_guarded_images = len(guarded_df)
pyplot.title(
(
f"Selection, based on guard values"
f" ({count_guarded_images} of {count_all_images})"
)
)
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
ax.set_ylim(settings.charts_y_limit)
path = settings.data_dir / "5-selected-values-based-on-guard-values.png"
pyplot.savefig(str(path))
return data_frame
def find_outliers(data_frame, settings):
mask = data_frame["guards.ok"] == True
guarded_df = data_frame[mask].copy()
pyplot.clf()
seaborn.boxplot(data=guarded_df, x=f"{ROI_NAME}.average")
pyplot.title(f"Boxblot of guarded values")
pyplot.xlabel("Average Intensity [au]")
path = settings.data_dir / "6-boxplot-of-guarded-values.png"
pyplot.savefig(str(path))
lower_quartil = guarded_df[f"{ROI_NAME}.average"].quantile(0.25)
upper_quartil = guarded_df[f"{ROI_NAME}.average"].quantile(0.75)
inter_quartil_range = upper_quartil - lower_quartil
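# Tukey's rule: values above Q3 + 1.5 * IQR are treated as outliers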
settings.outlier_upper_limit = upper_quartil + 1.5 * inter_quartil_range
data_frame["outlier.ok"] = (
data_frame[f"{ROI_NAME}.average"] < settings.outlier_upper_limit
)
return data_frame
def select_value_on_guards_and_outliers(data_frame, settings):
data_frame["outlier_guards.ok"] = (
data_frame["guards.ok"] & data_frame["outlier.ok"]
)
mask = data_frame["outlier_guards.ok"] == True
selected_df = data_frame[mask].copy()
pyplot.clf()
ax = seaborn.scatterplot(
x="frame", y=f"{ROI_NAME}.average", data=selected_df
)
pyplot.title(f"Selected Images, outliers removed")
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
ax.set_ylim(settings.charts_y_limit)
path = settings.data_dir / "7-selected-images-outliers-removed.png"
pyplot.savefig(str(path))
return selected_df
def smooth_rolling_min(selected_df, settings):
rm = (
selected_df[f"{ROI_NAME}.average"]
.rolling(settings.rolling_min_window)
.min()
)
# after a rolling window calculation, the first values will be NaN,
# we need to fill them
selected_df[f"{ROI_NAME}.average.rolling.min"] = rm.fillna(
method="backfill"
)
pyplot.clf()
ax = seaborn.scatterplot(
x="frame", y=f"{ROI_NAME}.average.rolling.min", data=selected_df
)
pyplot.title(f"Selected Images, outliers removed, rolling min applied")
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
ax.set_ylim(settings.charts_y_limit)
path = (
settings.data_dir
/ "8-selected-images-outliers-removed-rolling-min-applied.png"
)
pyplot.savefig(str(path))
return selected_df
def smooth_savgol_filter(selected_df, settings):
filtered = savgol_filter(
selected_df[f"{ROI_NAME}.average.rolling.min"],
settings.savgol_filter_window,
settings.savgol_filter_polynom,
)
selected_df[f"{ROI_NAME}.average.savgol"] = filtered
pyplot.clf()
seaborn.lineplot(
x="frame", y=f"{ROI_NAME}.average.savgol", data=selected_df
)
pyplot.title(
(
f"Selected Images, outliers removed,"
f" rolling min applied, Savitzky-Golay filtered"
)
)
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
path = (
settings.data_dir
/ "9-selected-images-outliers-removed-rolling-min-savgol-filtered.png"
)
pyplot.savefig(str(path))
return selected_df
def find_extremas(selected_df, settings):
indexes = peakutils.indexes(
selected_df[f"{ROI_NAME}.average.savgol"],
thres=settings.peak_threshold,
min_dist=settings.peak_min_distance,
)
maximas = selected_df.iloc[indexes].copy()
pyplot.clf()
peakplot_iloc(
selected_df["frame"],
selected_df[f"{ROI_NAME}.average.savgol"],
indexes,
)
pyplot.title(f"Finding Maximas")
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
path = settings.data_dir / "10-finding-maximas.png"
pyplot.savefig(str(path))
inverted_series = (
max(selected_df[f"{ROI_NAME}.average.savgol"])
- selected_df[f"{ROI_NAME}.average.savgol"]
)
indexes = peakutils.indexes(
inverted_series, min_dist=settings.peak_min_distance
)
minimas = selected_df.iloc[indexes].copy()
pyplot.clf()
peakplot_iloc(
selected_df["frame"],
selected_df[f"{ROI_NAME}.average.savgol"],
indexes,
)
pyplot.title(f"Finding Minimas")
pyplot.xlabel("Frame Number [1]")
pyplot.ylabel("Average Intensity [au]")
path = settings.data_dir / "11-finding-minimas.png"
pyplot.savefig(str(path))
maximas["is_maxima"] = True
minimas["is_maxima"] = False
extremas_df = pandas.concat([maximas, minimas]).sort_index()
return extremas_df
def save_data(data_frame, selected_df, extremas_df, settings):
path = settings.data_dir / "numeric-data.xlsx"
writer = pandas.ExcelWriter(path, engine="xlsxwriter")
extremas_df.to_excel(writer, sheet_name="extremas")
selected_df.to_excel(writer, sheet_name="selected data")
data_frame.to_excel(writer, sheet_name="raw data")
ignore_settings = {"tif_list", "cuts_dir"}
tmp_settings = {
k: [v] for k, v in settings.items() if k not in ignore_settings
}
tmp_settings_df = pandas.DataFrame(tmp_settings).T
tmp_settings_df.to_excel(writer, sheet_name="parameters")
writer.save()
def data_workflow(stats_results, settings):
print("3/4: Data analysis")
set_plotting_styles()
data_frame = construct_data_frame(stats_results)
save_temp(data_frame, settings)
find_guard_value(data_frame, settings)
data_frame = check_guards(data_frame, settings)
data_frame = find_outliers(data_frame, settings)
selected_df = select_value_on_guards_and_outliers(data_frame, settings)
selected_df = smooth_rolling_min(selected_df, settings)
selected_df = smooth_savgol_filter(selected_df, settings)
extremas_df = find_extremas(selected_df, settings)
save_data(data_frame, selected_df, extremas_df, settings)
return list(selected_df["file"])
def save_temp(data_frame, settings):
csv_path = settings.tif_dir / "_data.csv"
data_frame.to_csv(csv_path, sep="\t")
xls_path = settings.tif_dir / "_data.xlsx"
data_frame.to_excel(xls_path)
settings_path = settings.tif_dir / "_settings.pickle"
with open(settings_path, "wb") as fh:
pickle.dump(settings, fh)
def load_temp(folder):
dir_path = pathlib.Path(folder)
csv_path = dir_path / "_data.csv"
data_frame = pandas.read_csv(csv_path, sep="\t", index_col=0)
settings_path = dir_path / "_settings.pickle"
with open(settings_path, "rb") as fh:
settings = pickle.load(fh)
settings.cut_pad_x = 50
settings.cut_pad_y = 25
settings.guard_histogram_bins = 100
settings.guard_filter_window = 7
settings.guard_filter_polynom = 3
settings.guards_minima_min_dist = 10
settings.rolling_min_window = 5
settings.savgol_filter_window = 51
settings.savgol_filter_polynom = 1
settings.peak_min_distance = 5
settings.peak_threshold = 0.3
return data_frame, settings
def cached_workflow(folder):
print("3/4: Data analysis")
set_plotting_styles()
data_frame, settings = load_temp(folder)
find_guard_value(data_frame, settings)
data_frame = check_guards(data_frame, settings)
data_frame = find_outliers(data_frame, settings)
selected_df = select_value_on_guards_and_outliers(data_frame, settings)
selected_df = smooth_rolling_min(selected_df, settings)
selected_df = smooth_savgol_filter(selected_df, settings)
extremas_df = find_extremas(selected_df, settings)
save_data(data_frame, selected_df, extremas_df, settings)
return list(selected_df["file"]), settings

mtor/filesort.py (+52 lines)

@@ -0,0 +1,52 @@
import pathlib
from tqdm import tqdm
from .prescan import LABEL_SELECTED, LABEL_DISCARDED
def stem_file_list(selected_files):
return {pathlib.Path(filename).stem for filename in selected_files}
def rename_color_coded_images(file_stems, settings):
rename_pairs = []
for path in settings.colored_dir.iterdir():
if not path.stem.startswith("."):
label = (
LABEL_SELECTED if path.stem in file_stems else LABEL_DISCARDED
)
new_name = path.with_name(f"{path.stem}_{label}{path.suffix}")
rename_pairs.append((path, new_name))
return rename_pairs
def sort_cut_images(file_stems, settings):
sort_pairs = []
for path in settings.cuts_dir.iterdir():
if not path.stem.startswith("."):
stem = path.stem[:-4] # removes the "_cut" suffix
label = LABEL_SELECTED if stem in file_stems else LABEL_DISCARDED
new_path = (
settings[f"cuts_{label}_dir"]
/ f"{path.stem}_{label}{path.suffix}"
)
sort_pairs.append((path, new_path))
return sort_pairs
def remove_cuts_dir(settings):
for item in settings["cuts_dir"].iterdir():
item.unlink()
settings["cuts_dir"].rmdir()
def filesort_workflow(selected_files, settings):
print("4/4: Sorting images")
file_stems = stem_file_list(selected_files)
cc_rename_pairs = rename_color_coded_images(file_stems, settings)
cut_sort_pairs = sort_cut_images(file_stems, settings)
file_pairs = cc_rename_pairs + cut_sort_pairs
for old_path, new_path in tqdm(file_pairs):
old_path.rename(new_path)
remove_cuts_dir(settings)

mtor/imageproc.py (+122 lines)

@@ -0,0 +1,122 @@
from collections import namedtuple
from functools import partial
from multiprocessing import Pool
from PIL import Image
from tqdm import tqdm
import numpy
from .luts import FIRE_LUT
STATS_FUNCS = {
"min": numpy.amin,
"max": numpy.amax,
"1quantile": partial(numpy.percentile, q=25),
"2quantile": partial(numpy.percentile, q=50),
"3quantile": partial(numpy.percentile, q=75),
"average": numpy.average,
"median": numpy.median,
"std": numpy.std,
}
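# get_stats() below builds one entry per region and statistic, e.g. "roi.average" or "left.std"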
LEFT_GUARD = "left"
ROI_NAME = "roi"
RIGHT_GUARD = "right"
ROI_BORDER_INTENSITY = 2**15
TifArray = namedtuple("TifArray", ["path", "frame", "data"])
def open_as_array(tif_image):
with tif_image.path.open("rb") as filehandle:
image = Image.open(filehandle)
image_array = numpy.array(image, dtype=numpy.int32)
return TifArray(
path=tif_image.path, frame=tif_image.frame, data=image_array
)
def scale_and_colorize(tif_array, settings):
adjusted_array = (tif_array.data - settings.offset) // settings.scale
# paint roi
top = settings.roi_top
bottom = settings.roi_bottom
right = settings.roi_right
left = settings.roi_left
adjusted_array[top, left:right] = ROI_BORDER_INTENSITY
adjusted_array[bottom, left:right] = ROI_BORDER_INTENSITY
adjusted_array[top:bottom, left] = ROI_BORDER_INTENSITY
adjusted_array[top:bottom, right] = ROI_BORDER_INTENSITY
adjusted = Image.fromarray(adjusted_array)
converted = adjusted.convert(mode="L").convert(mode="P")
converted.putpalette(FIRE_LUT)
return converted.convert("RGB")
def safe_color_coded(tif_array, settings):
png_image = scale_and_colorize(tif_array, settings)
png_path = settings.colored_dir / (tif_array.path.stem + ".jpg")
with png_path.open("wb") as outhandle:
png_image.save(outhandle)
del png_image
def safe_cut(tif_array, settings):
cut_path = settings.cuts_dir / (tif_array.path.stem + "_cut.tif")
top = settings.cut_top
bottom = settings.cut_bottom
right = settings.cut_right
left = settings.cut_left
with cut_path.open("wb") as outhandle:
cut_array = tif_array.data[top:bottom, left:right]
cut_img = Image.fromarray(cut_array)
cut_img.save(outhandle)
del cut_img
def get_roi(tif_array, settings):
top = settings.roi_top
bottom = settings.roi_bottom
right = settings.roi_right
left = settings.roi_left
return tif_array.data[top:bottom, left:right]
def analyse_roi(tif_array, settings):
data = get_roi(tif_array, settings)
results = {}
results.update(get_stats(data[:, 0], LEFT_GUARD))
results.update(get_stats(data[:, 1:-1], ROI_NAME))
results.update(get_stats(data[:, -1], RIGHT_GUARD))
return results
def get_stats(array, prefix):
return {f"{prefix}.{k}": f(array) for k, f in STATS_FUNCS.items()}
def process_one_tif(tif_image, settings):
tif_array = open_as_array(tif_image)
safe_color_coded(tif_array, settings)
safe_cut(tif_array, settings)
stats_result = analyse_roi(tif_array, settings)
stats_result["file"] = tif_image.path.name
stats_result["frame"] = tif_image.frame
return stats_result
def image_workflow(settings):
print("2/4: Image analysis and conversion")
func = partial(process_one_tif, settings=settings)
tif_list = settings.tif_list
num_items = len(tif_list)
with Pool(4) as p:
stat_results = list(tqdm(p.imap(func, tif_list), total=num_items))
return stat_results

mtor/luts.py (+770 lines)

@@ -0,0 +1,770 @@
FIRE_LUT = [
0,
0,
0,
0,
0,
7,
0,
0,
15,
0,
0,
22,
0,
0,
30,
0,
0,
38,
0,
0,
45,
0,
0,
53,
0,
0,
61,
0,
0,
65,
0,
0,
69,
0,
0,
74,
0,
0,
78,
0,
0,
82,
0,
0,
87,
0,
0,
91,
1,
0,
96,
4,
0,
100,
7,
0,
104,
10,
0,
108,
13,
0,
113,
16,
0,
117,
19,
0,
121,
22,
0,
125,
25,
0,
130,
28,
0,
134,
31,
0,
138,
34,
0,
143,
37,
0,
147,
40,
0,
151,
43,
0,
156,
46,
0,
160,
49,
0,
165,
52,
0,
168,
55,
0,
171,
58,
0,
175,
61,
0,
178,
64,
0,
181,
67,
0,
185,
70,
0,
188,
73,
0,
192,
76,
0,
195,
79,
0,
199,
82,
0,
202,
85,
0,
206,
88,
0,
209,
91,
0,
213,
94,
0,
216,
98,
0,
220,
101,
0,
220,
104,
0,
221,
107,
0,
222,
110,
0,
223,
113,
0,
224,
116,
0,
225,
119,
0,
226,
122,
0,
227,
125,
0,
224,
128,
0,
222,
131,
0,
220,
134,
0,
218,
137,
0,
216,
140,
0,
214,
143,
0,
212,
146,
0,
210,
148,
0,
206,
150,
0,
202,
152,
0,
199,
154,
0,
195,
156,
0,
191,
158,
0,
188,
160,
0,
184,
162,
0,
181,
163,
0,
177,
164,
0,
173,
166,
0,
169,
167,
0,
166,
168,
0,
162,
170,
0,
158,
171,
0,
154,
173,
0,
151,
174,
0,
147,
175,
0,
143,
177,
0,
140,
178,
0,
136,
179,
0,
132,
181,
0,
129,
182,
0,
125,
184,
0,
122,
185,
0,
118,
186,
0,
114,
188,
0,
111,
189,
0,
107,
190,
0,
103,
192,
0,
100,
193,
0,
96,
195,
0,
93,
196,
1,
89,
198,
3,
85,
199,
5,
82,
201,
7,
78,
202,
8,
74,
204,
10,
71,
205,
12,
67,
207,
14,
64,
208,
16,
60,
209,
19,
56,
210,
21,
53,
212,
24,
49,
213,
27,
45,
214,
29,
42,
215,
32,
38,
217,
35,
35,
218,
37,
31,
220,
40,
27,
221,
43,
23,
223,
46,
20,
224,
48,
16,
226,
51,
12,
227,
54,
8,
229,
57,
5,
230,
59,
4,
231,
62,
3,
233,
65,
3,
234,
68,
2,
235,
70,
1,
237,
73,
1,
238,
76,
0,
240,
79,
0,
241,
81,
0,
243,
84,
0,
244,
87,
0,
246,
90,
0,
247,
92,
0,
249,
95,
0,
250,
98,
0,
252,
101,
0,
252,
103,
0,
252,
105,
0,
253,
107,
0,
253,
109,
0,
253,
111,
0,
254,
113,
0,
254,
115,
0,
255,
117,
0,
255,
119,
0,
255,
121,
0,
255,
123,
0,
255,
125,
0,
255,
127,
0,
255,
129,
0,
255,
131,
0,
255,
133,
0,
255,
134,
0,
255,
136,
0,
255,
138,
0,
255,
140,
0,
255,
141,
0,
255,
143,
0,
255,
145,
0,
255,
147,
0,
255,
148,
0,
255,
150,
0,
255,
152,
0,
255,
154,
0,
255,
155,
0,
255,
157,
0,
255,
159,
0,
255,
161,
0,
255,
162,
0,
255,
164,
0,
255,
166,
0,
255,
168,
0,
255,
169,
0,
255,
171,
0,
255,
173,
0,
255,
175,
0,
255,
176,
0,
255,
178,
0,
255,
180,
0,
255,
182,
0,
255,
184,
0,
255,
186,
0,
255,
188,
0,
255,
190,
0,
255,
191,
0,
255,
193,
0,
255,
195,
0,
255,
197,
0,
255,
199,
0,
255,
201,
0,
255,
203,
0,
255,
205,
0,
255,
206,
0,
255,
208,
0,
255,
210,
0,
255,
212,
0,
255,
213,
0,
255,
215,
0,
255,
217,
0,
255,
219,
0,
255,
220,
0,
255,
222,
0,
255,
224,
0,
255,
226,
0,
255,
228,
0,
255,
230,
0,
255,
232,
0,
255,
234,
0,
255,
235,
4,
255,
237,
8,
255,
239,
13,
255,
241,
17,
255,
242,
21,
255,
244,
26,
255,
246,
30,
255,
248,
35,
255,
248,
42,
255,
249,
50,
255,
250,
58,
255,
251,
66,
255,
252,
74,
255,
253,
82,
255,
254,
90,
255,
255,
98,
255,
255,
105,
255,
255,
113,
255,
255,
121,
255,
255,
129,
255,
255,
136,
255,
255,
144,
255,
255,
152,
255,
255,
160,
255,
255,
167,
255,
255,
175,
255,
255,
183,
255,
255,
191,
255,
255,
199,
255,
255,
207,
255,
255,
215,
255,
255,
223,
255,
255,
227,
255,
255,
231,
255,
255,
235,
255,
255,
239,
255,
255,
243,
255,
255,
247,
255,
255,
251,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
255,
]

mtor/prescan.py (+163 lines)

@@ -0,0 +1,163 @@
import re
from collections import namedtuple
from pathlib import Path
from PIL import Image
from tqdm import tqdm
RE_DIGITS = re.compile(r"(\d+)")
LABEL_SELECTED = "selected"
LABEL_DISCARDED = "discarded"
TifImage = namedtuple("TifImage", ["path", "frame"])
class Settings(dict):
""" namespacing the constant values for a analysis with nice access """
def __getattr__(self, key):
""" return a settings value as instance property """
return self[key]
def __setattr__(self, key, value):
""" set a settings value via instance property """
self[key] = value
def __getstate__(self):
# Copy the object's state from self.__dict__ which contains
# all our instance attributes. Always use the dict.copy()
# method to avoid modifying the original state.
return self.__dict__.copy()
def __setstate__(self, state):
# Restore the instance attributes from the pickled state.
self.__dict__.update(state)
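# Illustrative use (not part of the workflow itself): a Settings instance is a
# plain dict whose keys can also be read and written as attributes,
# e.g. s = Settings(); s.folder = "mtor-bilder"; s["folder"] == s.folder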
def scan_image_dir(settings):
""" scan a folder for tif images """
tif_dir = Path(settings.folder)
if not tif_dir.is_dir():
raise IOError(f"Not a directory: {settings.folder}")
visible = (i for i in tif_dir.iterdir() if not i.stem.startswith("."))
tifs = (item for item in visible if item.suffix == ".tif")
results = []
for path in tifs:
try:
match = RE_DIGITS.search(path.name).group()
except AttributeError as ex:
raise ValueError(
f"no sequence number found in {path.name}"
) from ex
results.append(TifImage(path=path, frame=int(match)))
settings.tif_dir = tif_dir
settings.tif_list = results
return settings
def prepare_output_dirs(settings):
dir_names = (
"colored",
"cuts",
f"cuts_{LABEL_DISCARDED}",
f"cuts_{LABEL_SELECTED}",
"data",
)
for subdir in dir_names:
sub_path = settings.tif_dir / subdir
sub_path.mkdir(exist_ok=True)
for item in sub_path.iterdir():
item.unlink()
settings[f"{subdir}_dir"] = sub_path
return settings
def pre_scan_tifs(settings):
""" scan a sequence of tif images for common values """
lowest = 2 ** 16
highest = 0
width = None
height = None
print("1/4: scanning tifs for common autocontrast values")
for tif in tqdm(settings.tif_list):
with tif.path.open("rb") as filehandle:
image = Image.open(filehandle)
mi, ma = image.getextrema()
lowest = min(lowest, mi)
highest = max(highest, ma)
if width is None:
width, height = image.size
elif width != image.width or height != image.height:
raise ValueError("Images have different sizes")
print(f"intensity range: {lowest} to {highest}")
print(f"image size: {width} x {height} pixels")
settings.intensity_min = lowest
settings.intensity_max = highest
settings.image_width = width
settings.image_height = height
return settings
def check_roi_size(settings, top, right, bottom, left):
if (
top > settings.image_height
or bottom > settings.image_height
or right > settings.image_width
or left > settings.image_width
):
raise ValueError("ROI out of bounce")
if right < left:
left, right = right, left
if bottom < top:
top, bottom = bottom, top
settings.roi_top = top
settings.roi_bottom = bottom
settings.roi_right = right
settings.roi_left = left
settings.cut_top = max(0, top - settings.cut_pad_y)
settings.cut_bottom = min(settings.image_height, bottom + settings.cut_pad_y)
settings.cut_right = min(settings.image_width, right + settings.cut_pad_x)
settings.cut_left = max(0, left - settings.cut_pad_x)
return settings
def prescan_workflow(folder, top, right, bottom, left, boost=10, **kargs):
settings = Settings()
settings.folder = folder
settings.boost = boost
settings.cut_pad_x = 50
settings.cut_pad_y = 25
settings.guard_histogram_bins = 100
settings.guard_filter_window = 7
settings.guard_filter_polynom = 3
settings.guards_minima_min_dist = 10
settings.rolling_min_window = 5
settings.savgol_filter_window = 51
settings.savgol_filter_polynom = 1
settings.peak_min_distance = 5
settings.peak_threshold = 0.3
settings.update(kargs)
settings = scan_image_dir(settings)
settings = prepare_output_dirs(settings)
settings = pre_scan_tifs(settings)
settings = check_roi_size(settings, top, right, bottom, left)
settings.offset = settings.intensity_min
max_offset = settings.intensity_max - settings.intensity_min
settings.scale = max_offset / 255 / boost
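# Worked example with illustrative numbers (the extrema printed by test.py):
# intensity_min = 309, intensity_max = 23922 and boost = 10 give
# scale = 23613 / 255 / 10 ≈ 9.26, so after subtracting the offset only
# intensities up to about 309 + 255 * 9.26 ≈ 2670 span the 0-255 gray range;
# anything brighter saturates to white in the later convert("L") step.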
return settings

pyproject.toml (+13 lines)

@@ -0,0 +1,13 @@
[tool.black]
line-length = 79
py37 = true
include = '\.pyi?$'
exclude = '''
/(
\.git
| \.tox
| \.venv
| build
| dist
)/
'''

test.py (+316 lines)

@@ -0,0 +1,316 @@
from pathlib import Path
from PIL import Image, ImageOps
from tqdm import tqdm
import numpy
import skimage
import warnings
FIRE_LUT = [
0, 0, 0,
0, 0, 7,
0, 0, 15,
0, 0, 22,
0, 0, 30,
0, 0, 38,
0, 0, 45,
0, 0, 53,
0, 0, 61,
0, 0, 65,
0, 0, 69,
0, 0, 74,
0, 0, 78,
0, 0, 82,
0, 0, 87,
0, 0, 91,
1, 0, 96,
4, 0, 100,
7, 0, 104,
10, 0, 108,
13, 0, 113,
16, 0, 117,
19, 0, 121,
22, 0, 125,
25, 0, 130,
28, 0, 134,
31, 0, 138,
34, 0, 143,
37, 0, 147,
40, 0, 151,
43, 0, 156,
46, 0, 160,
49, 0, 165,
52, 0, 168,
55, 0, 171,
58, 0, 175,
61, 0, 178,
64, 0, 181,
67, 0, 185,
70, 0, 188,
73, 0, 192,
76, 0, 195,
79, 0, 199,
82, 0, 202,
85, 0, 206,
88, 0, 209,
91, 0, 213,
94, 0, 216,
98, 0, 220,
101, 0, 220,
104, 0, 221,
107, 0, 222,
110, 0, 223,
113, 0, 224,
116, 0, 225,
119, 0, 226,
122, 0, 227,
125, 0, 224,
128, 0, 222,
131, 0, 220,
134, 0, 218,
137, 0, 216,
140, 0, 214,
143, 0, 212,
146, 0, 210,
148, 0, 206,
150, 0, 202,
152, 0, 199,
154, 0, 195,
156, 0, 191,
158, 0, 188,
160, 0, 184,
162, 0, 181,
163, 0, 177,
164, 0, 173,
166, 0, 169,
167, 0, 166,
168, 0, 162,
170, 0, 158,
171, 0, 154,
173, 0, 151,
174, 0, 147,
175, 0, 143,
177, 0, 140,
178, 0, 136,
179, 0, 132,
181, 0, 129,
182, 0, 125,
184, 0, 122,
185, 0, 118,
186, 0, 114,
188, 0, 111,
189, 0, 107,
190, 0, 103,
192, 0, 100,
193, 0, 96,
195, 0, 93,
196, 1, 89,
198, 3, 85,
199, 5, 82,
201, 7, 78,
202, 8, 74,
204, 10, 71,
205, 12, 67,
207, 14, 64,
208, 16, 60,
209, 19, 56,
210, 21, 53,
212, 24, 49,
213, 27, 45,
214, 29, 42,
215, 32, 38,
217, 35, 35,
218, 37, 31,
220, 40, 27,
221, 43, 23,
223, 46, 20,
224, 48, 16,
226, 51, 12,
227, 54, 8,
229, 57, 5,
230, 59, 4,
231, 62, 3,
233, 65, 3,
234, 68, 2,
235, 70, 1,
237, 73, 1,
238, 76, 0,
240, 79, 0,
241, 81, 0,
243, 84, 0,
244, 87, 0,
246, 90, 0,
247, 92, 0,
249, 95, 0,
250, 98, 0,
252, 101, 0,
252, 103, 0,
252, 105, 0,
253, 107, 0,
253, 109, 0,
253, 111, 0,
254, 113, 0,
254, 115, 0,
255, 117, 0,
255, 119, 0,
255, 121, 0,
255, 123, 0,
255, 125, 0,
255, 127, 0,
255, 129, 0,
255, 131, 0,
255, 133, 0,
255, 134, 0,
255, 136, 0,
255, 138, 0,
255, 140, 0,
255, 141, 0,
255, 143, 0,
255, 145, 0,
255, 147, 0,
255, 148, 0,
255, 150, 0,
255, 152, 0,
255, 154, 0,
255, 155, 0,
255, 157, 0,
255, 159, 0,
255, 161, 0,
255, 162, 0,
255, 164, 0,
255, 166, 0,
255, 168, 0,
255, 169, 0,
255, 171, 0,
255, 173, 0,
255, 175, 0,
255, 176, 0,
255, 178, 0,
255, 180, 0,
255, 182, 0,
255, 184, 0,
255, 186, 0,
255, 188, 0,
255, 190, 0,
255, 191, 0,
255, 193, 0,
255, 195, 0,
255, 197, 0,
255, 199, 0,
255, 201, 0,
255, 203, 0,
255, 205, 0,
255, 206, 0,
255, 208, 0,
255, 210, 0,
255, 212, 0,
255, 213, 0,
255, 215, 0,
255, 217, 0,
255, 219, 0,
255, 220, 0,
255, 222, 0,
255, 224, 0,
255, 226, 0,
255, 228, 0,
255, 230, 0,
255, 232, 0,
255, 234, 0,
255, 235, 4,
255, 237, 8,
255, 239, 13,
255, 241, 17,
255, 242, 21,
255, 244, 26,
255, 246, 30,
255, 248, 35,
255, 248, 42,
255, 249, 50,
255, 250, 58,
255, 251, 66,
255, 252, 74,
255, 253, 82,
255, 254, 90,
255, 255, 98,
255, 255, 105,
255, 255, 113,
255, 255, 121,
255, 255, 129,
255, 255, 136,
255, 255, 144,
255, 255, 152,
255, 255, 160,
255, 255, 167,
255, 255, 175,
255, 255, 183,
255, 255, 191,
255, 255, 199,
255, 255, 207,
255, 255, 215,
255, 255, 223,
255, 255, 227,
255, 255, 231,
255, 255, 235,
255, 255, 239,
255, 255, 243,
255, 255, 247,
255, 255, 251,
255, 255, 255,
255, 255, 255,
255, 255, 255,
255, 255, 255,
255, 255, 255,
255, 255, 255,
255, 255, 255,
255, 255, 255,
]
tif_dir = Path("original_tifs")
png_dir = Path("auto_pngs")
for item in png_dir.iterdir():
item.unlink()
visible_items = (i for i in tif_dir.iterdir() if not i.stem.startswith("."))
tifs = [i for i in visible_items if i.suffix == ".tif"]
lowest = 2**16
lowest = 309
highest = 0
highest = 23922//4
print("1/3: scanning tifs for common autocontrast values")
for path in tqdm(tifs):
break
with path.open("rb") as filehandle:
image = Image.open(filehandle)
mi, ma = image.getextrema()
lowest = min(lowest, mi)
highest = max(highest, ma)
print("lowest intensity found: ", lowest)
print("highest intensity found: ", highest)
scaling_factor = highest / 255
print("2/3: adjusting contrast, converting to png")
for path in tqdm(tifs):
break
with path.open("rb") as filehandle:
image = Image.open(filehandle)
image_array = numpy.array(image, dtype=numpy.int32)
converted = Image.fromarray((image_array - lowest) // scaling_factor)
adjusted = converted.convert(mode="L").convert(mode="P")
adjusted.putpalette(FIRE_LUT)
png_path = png_dir / (path.stem + ".png")
with png_path.open("wb") as outhandle:
adjusted.save(outhandle)
cut = image_array[50:300,660:910]
import mtor
print(mtor.analyse_cut(cut))
#x = 660, 50,
#w, h = 250, 250

test_mtor.py (+9 lines)

@@ -0,0 +1,9 @@
import mtor
tif_dir = "original_tifs"
#mtor.process_tifs(tif_dir, 50, 910, 300, 660, boost=5)
mtor.run()
#mtor.run_data()