
showing print parameters on report

sx_rewrite
Holger Frey, 5 years ago
parent commit 03ec2095de
  1. .gitignore (3 changes)
  2. s3printlog/__init__.py (1 change)
  3. s3printlog/gui.py (23 changes)
  4. s3printlog/logparser.py (68 changes)
  5. s3printlog/main.py (71 changes)
  6. s3printlog/report.py (35 changes)
  7. test.py (12 changes)

.gitignore vendored (3 changes)

@@ -1,6 +1,9 @@
# example data
example 1/
example 2/
example fail 1/
example fail 2/
example fail 3/
# ---> Python
# Byte-compiled / optimized / DLL files

s3printlog/__init__.py (1 change)

@@ -1 +1,2 @@
from . import gui
from . import main

s3printlog/gui.py (23 changes)

@@ -5,8 +5,7 @@ import tkinter.ttk as ttk
from pathlib import Path
from tkinter import filedialog
from .logparser import get_log_files, DROP_CHECK_SUFFIX, ENVIRONMENT_SUFFIX
from .main import process_log_folder, open_with_default_app
from .main import get_log_files, process_log_folder, open_with_default_app
if getattr(sys, "frozen", False):
@@ -20,16 +19,6 @@ else:
icon_path = None
def validate_folder(folder):
dir_path = Path(folder)
drop_checks = get_log_files(dir_path, DROP_CHECK_SUFFIX)
env_log = get_log_files(dir_path, ENVIRONMENT_SUFFIX)
if drop_checks and env_log:
return folder
else:
return None
class StatusPanel(tk.Frame):
def __init__(self, parent):
tk.Frame.__init__(self, parent.master)
@@ -89,7 +78,7 @@ class Application(tk.Frame):
master.minsize(height=150, width=400)
tk.Frame.__init__(self, master)
self._master = master
self.selected_folder = None
self.log_files = None
self.pack(fill=tk.BOTH)
self.file_panel = FilePanel(self)
self.action_panel = ActionPanel(self)
@@ -105,12 +94,12 @@ class Application(tk.Frame):
opts = {"initialdir": "~/Desktop", "mustexist": True}
selection = tk.filedialog.askdirectory(**opts)
if selection:
self.selected_folder = validate_folder(selection)
self.log_files = get_log_files(selection)
self.set_active_state()
def set_active_state(self, event=None):
if self.selected_folder is not None:
self.file_panel.set_text(self.selected_folder)
if self.log_files:
self.file_panel.set_text(self.log_files.folder)
self.action_panel.enable()
else:
self.file_panel.set_text("This is not a S3 Print Log Folder")
@@ -123,7 +112,7 @@ class Application(tk.Frame):
self.status_panel.set_text(
"Generating report, PDF should be opened in a couple of seconds"
)
report_file = process_log_folder(self.selected_folder)
report_file = process_log_folder(self.log_files)
open_with_default_app(report_file)
self.status_panel.set_text("Report Generated.")

s3printlog/logparser.py (68 changes)

@@ -10,8 +10,8 @@ from collections import namedtuple
from enum import Enum
from io import StringIO
DROP_CHECK_SUFFIX = ".cor"
ENVIRONMENT_SUFFIX = "_Logfile.log"
PrintLogResult = namedtuple("PrintLogResult", ["environment", "info"])
class CheckWhen(Enum):
@@ -25,7 +25,7 @@ class CheckResult(Enum):
SKIPPED = "skipped"
class LogResult:
class DropCheckResult:
def __init__(
self,
path,
@@ -101,18 +101,13 @@ def parse_log_line(line, cast_to, default_value=np.nan, separator="="):
return parse_str_value(str_data, cast_to, default_value)
def get_log_files(folder, suffix=".cor"):
visible = (p for p in folder.iterdir() if not p.name.startswith("."))
return [p for p in visible if p.name.endswith(suffix)]
def parse_log_files(log_list):
pre_run = dict()
post_run = dict()
well_list = list()
# use the files sorted by date and time
for path in sorted(log_list):
log_result = LogResult.from_file(path)
log_result = DropCheckResult.from_file(path)
if log_result.well not in pre_run:
log_result.when = CheckWhen.PRE_RUN
pre_run[log_result.well] = log_result
@@ -125,7 +120,9 @@ def parse_log_files(log_list):
skipped_runs = {well for well in pre_run if well not in post_run}
for well in skipped_runs:
post_result = LogResult("", well, CheckResult.SKIPPED, when=CheckWhen.POST_RUN)
post_result = DropCheckResult(
"", well, CheckResult.SKIPPED, when=CheckWhen.POST_RUN
)
post_run[well] = post_result
parsed_files = []
@@ -136,12 +133,53 @@ def parse_log_files(log_list):
return pd.DataFrame([pf.as_dict() for pf in parsed_files])
def parse_environment_log(log_file_path):
with open(log_file_path, "r", encoding="iso-8859-1") as file_handle:
env_lines = [l for l in file_handle if "\tHumidity=\t" in l]
def split_print_log_line(line):
_, value = line.split(":", 1)
return value.strip()
def count_solutions(file_handle):
solutions = set()
for line in file_handle:
line = line.strip()
if not line or line[0] in ("X", "Y", "F", "["):
# empty line or uninteresting one, pick next one
continue
elif line.startswith("Drops/Field"):
# finished with all field definitions, leave the loop
break
entries = (item.strip() for item in line.split("\t"))
wells = (well for well in entries if well)
solutions.update(wells)
return len(solutions)
def parse_print_log(log_files):
env_lines = []
print_info = {}
with open(log_files, "r", encoding="iso-8859-1") as file_handle:
for line in file_handle:
if "\tHumidity=\t" in line:
env_lines.append(line)
elif line.startswith("Probe:"):
print_info["source"] = split_print_log_line(line)
elif line.startswith("Target:"):
target_and_fields = split_print_log_line(line)
target, fields = target_and_fields.rsplit(":", 1)
print_info["target"] = target.strip()
print_info["fields"] = len(fields.split(","))
elif line.startswith("Humidity:"):
print_info["humidity"] = split_print_log_line(line)
elif line.startswith("Run Name:"):
print_info["run"] = split_print_log_line(line)
elif line.startswith("Dot Pitch:"):
# important to pass the filehandle iterator here
print_info["solutions"] = count_solutions(file_handle)
buff = StringIO("".join(env_lines))
columns = ["datetime", "garbage 1", "humidity", "garbage 2", "temperature"]
df = pd.read_csv(
tmp_df = pd.read_csv(
buff, sep="\t", header=None, names=columns, index_col=0, parse_dates=True
)
return df.drop(columns=["garbage 1", "garbage 2"])
environment_df = tmp_df.drop(columns=["garbage 1", "garbage 2"])
return PrintLogResult(environment_df, print_info)
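
A minimal usage sketch of the new parse_print_log (the file name below is hypothetical; the info keys match the ones set in the parser above):

# Sketch, assumed usage: parse a print log and read back the extracted parameters.
from pathlib import Path
from s3printlog.logparser import parse_print_log

log_path = Path("example 1") / "print_Logfile.log"  # hypothetical log file
result = parse_print_log(log_path)
print(result.info["source"])      # source plate, taken from the "Probe:" line
print(result.info["solutions"])   # distinct solutions counted by count_solutions()
print(result.environment.head())  # humidity / temperature DataFrame over time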

s3printlog/main.py (71 changes)

@@ -12,36 +12,48 @@ from .analysis import (
generate_environment_graph,
find_missing_drops,
)
from .logparser import (
get_log_files,
parse_log_files,
parse_environment_log,
DROP_CHECK_SUFFIX,
ENVIRONMENT_SUFFIX,
)
from .logparser import parse_log_files, parse_print_log
from .report import generate_report
DROP_CHECK_SUFFIX = ".cor"
ENVIRONMENT_SUFFIX = "_Logfile.log"
DropProcessResult = namedtuple("DropProcessResult", ["drops", "missing"])
PrintLogResult = namedtuple("PrintLogResult", ["environment", "info"])
ProcessResult = namedtuple("ProcessResult", ["data_frame", "file_path"])
class LogFiles(namedtuple("LogFiles", ["folder", "drop_check", "environment"])):
__slots__ = ()
def __bool__(self):
# "and" yields a list / Path / None here, but __bool__ must return an actual bool
return bool(self.drop_check and self.environment)
class NoLogFileError(IOError):
pass
ProcessResult = namedtuple("ProcessResult", ["data_frame", "file_path"])
DropProcessResult = namedtuple("DropProcessResult", ["drops", "missing"])
def get_log_files(folder):
folder = pathlib.Path(folder)
visible = [p for p in folder.iterdir() if not p.name.startswith(".")]
drop_files = [p for p in visible if p.name.endswith(DROP_CHECK_SUFFIX)]
env_files = [p for p in visible if p.name.endswith(ENVIRONMENT_SUFFIX)]
if len(env_files) != 1:
env_files = [None]
return LogFiles(folder, drop_files, env_files[0])
def process_drop_checks(folder):
drop_log_paths = get_log_files(folder, suffix=DROP_CHECK_SUFFIX)
if len(drop_log_paths) == 0:
raise NoLogFileError("Drop Check Files Not Found")
drop_log_df = parse_log_files(drop_log_paths)
def process_drop_checks(log_files):
drop_log_df = parse_log_files(log_files.drop_check)
generate_drop_check_chart(drop_log_df)
image_path = folder / "Drop Check.png"
image_path = log_files.folder / f"{log_files.folder}_drop_check.png"
plt.savefig(image_path)
missing_drop_df = find_missing_drops(drop_log_df)
misssing_drop_list_path = folder / "Missed spots.xlsx"
misssing_drop_list_path = log_files.folder / f"{log_files.folder}_missed_spots.xlsx"
missing_drop_df.to_excel(misssing_drop_list_path)
return DropProcessResult(
@@ -50,26 +62,29 @@ def process_drop_checks(folder):
)
def process_environment(folder):
env_log_paths = get_log_files(folder, suffix=ENVIRONMENT_SUFFIX)
if len(env_log_paths) != 1:
raise NoLogFileError("Log File Not Found")
env_log_df = parse_environment_log(env_log_paths[0])
def process_print_log(log_files):
print_log = parse_print_log(log_files.environment)
generate_environment_graph(env_log_df)
image_path = folder / "Environment.png"
generate_environment_graph(print_log.environment)
image_path = log_files.folder / f"{log_files.folder}_environment.png"
plt.savefig(image_path)
return ProcessResult(env_log_df, image_path)
return PrintLogResult(ProcessResult(print_log.environment, image_path), print_log.info)
def process_log_folder(folder):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
folder = pathlib.Path(folder)
drops, missing = process_drop_checks(folder)
environment = process_environment(folder)
return generate_report(folder, drops, missing, environment)
log_files = get_log_files(folder)
drop_check_result = process_drop_checks(log_files)
print_log_result = process_print_log(log_files)
return generate_report(
log_files,
drop_check_result.drops,
drop_check_result.missing,
print_log_result.environment,
print_log_result.info,
)
def open_with_default_app(some_path):
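
A minimal sketch of how the reworked entry points in main.py fit together (the folder name is hypothetical):

# Sketch, assumed usage: collect log files, check the folder is valid, build the report.
from s3printlog.main import get_log_files, process_log_folder, open_with_default_app

folder = "example 1"  # hypothetical folder
log_files = get_log_files(folder)
if log_files:  # LogFiles is truthy only when drop checks and one environment log exist
    report_path = process_log_folder(folder)
    open_with_default_app(report_path)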

s3printlog/report.py (35 changes)

@@ -15,12 +15,14 @@ from reportlab.platypus import (
Paragraph,
Spacer,
SimpleDocTemplate,
Table,
)
ImageBuffer = namedtuple("ImageBuffer", ["buffer", "width", "height"])
FailedDropImage = namedtuple("FailedDropImage", ["path", "well"])
styles = getSampleStyleSheet()
style_n = styles["Normal"]
style_h1 = styles["Heading1"]
@@ -110,7 +112,31 @@ def failed_drops_flowable(drops, missing, what):
return section
def generate_report(folder, drops, missing, environment):
def print_info_flowable(print_info):
data = [
("Source Plate:", print_info["source"]),
("Print Solutions:", f"{print_info['solutions']} different solutions"),
("Target Substrate:", print_info["target"]),
("Number of Fields:", f"{print_info['fields']} fields printed"),
("Run Method:", print_info["run"]),
(
"Humidity Setting:",
f"{print_info['humidity']} (humidifier might be turned off)",
),
]
# Table(data, colWidths=None, rowHeights=None, style=None, splitByRow=1,
# repeatRows=0, repeatCols=0, rowSplitRange=None, spaceBefore=None,
# spaceAfter=None)
return Table(data, style=[
('TOPPADDING', (0, 0), (-1, -1), 1),
('RIGHTPADDING', (0, 0), (-1, -1), 7),
('BOTTOMPADDING', (0, 0), (-1, -1), 1),
('LEFTPADDING', (0, 0), (-1, -1), 0),
],hAlign='LEFT'
)
def generate_report(log_files, drops, missing, environment, print_info):
story = []
@@ -122,12 +148,15 @@ def generate_report(folder, drops, missing, environment):
story.append(headline)
story.append(Spacer(width=17 * cm, height=0.5 * cm))
story.append(print_info_flowable(print_info))
story.append(Spacer(width=17 * cm, height=0.5 * cm))
story.append(graph_flowable("Drop Check Graphs", drops.file_path))
story.extend(failed_drops_flowable(drops, missing, "pre run"))
story.extend(failed_drops_flowable(drops, missing, "post run"))
if len(story) == 3:
if len(story) == 5:
# no failed drop checks were reported
story.append(Spacer(width=17 * cm, height=1 * cm))
story.append(Paragraph("No failed drop checks found.", style_n))
@@ -135,7 +164,7 @@ def generate_report(folder, drops, missing, environment):
story.append(PageBreak())
story.append(graph_flowable("Environment Graphs", environment.file_path))
pdf_path = folder / "print_report.pdf"
pdf_path = log_files.folder / f"{log_files.folder}_report.pdf"
doc = SimpleDocTemplate(
str(pdf_path),
pagesize=A4,
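
For reference, a sketch of the print_info dict that print_info_flowable consumes; the keys come from parse_print_log in logparser.py, the values here are made-up placeholders:

# Sketch: placeholder print_info dict matching the keys read by print_info_flowable.
print_info = {
    "source": "source plate 384",  # "Probe:" line
    "solutions": 12,               # number of distinct solutions
    "target": "slide batch A",     # "Target:" line
    "fields": 8,                   # number of printed fields
    "run": "example run",          # "Run Name:" line
    "humidity": "45%",             # "Humidity:" line
}
flowable = print_info_flowable(print_info)  # left-aligned reportlab Table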

test.py (12 changes)

@@ -0,0 +1,12 @@
# import s3printlog
# s3printlog.run("example 1")
from s3printlog import main
folder = "example 1"
#folder = "example 2"
print("Generating report, PDF should be opened in a couple of seconds")
report_file = main.process_log_folder(folder)
main.open_with_default_app(report_file)
print("Report Generated.")