From be24fbaaca7858b21e2dab5b7a83696d9b39aeaf Mon Sep 17 00:00:00 2001
From: Holger Frey
Date: Thu, 19 Nov 2020 15:40:34 +0100
Subject: [PATCH] linted

---
 Makefile                              |  6 +++---
 sensospot_data/normalisation.py       |  8 ++++----
 tests/test_normailsation.py           | 16 ++++++++--------
 tests/test_parameters.py              |  4 ++--
 tests/test_parser.py                  | 21 ++++++++++++---------
 tests/test_sensovation_data_parser.py |  2 +-
 6 files changed, 30 insertions(+), 27 deletions(-)

diff --git a/Makefile b/Makefile
index ace9ea1..8b45bdd 100644
--- a/Makefile
+++ b/Makefile
@@ -51,10 +51,10 @@ clean-test: ## remove test and coverage artifacts
 	rm -fr htmlcov/
 
 lint: ## reformat with black and check style with flake8
-	isort -rc sensospot_data
-	isort -rc tests
+	isort sensospot_data
+	isort tests
 	black sensospot_data tests
-	flake8 --ignore E231,W503 sensospot_data tests
+	flake8 --ignore E231,W503,E402 sensospot_data tests
 
 test: ## run tests quickly with the default Python
 	pytest tests -x --disable-warnings
diff --git a/sensospot_data/normalisation.py b/sensospot_data/normalisation.py
index 2d0ced0..1f1b28b 100644
--- a/sensospot_data/normalisation.py
+++ b/sensospot_data/normalisation.py
@@ -24,7 +24,7 @@ def _split_data_frame(data_frame, column):
 
 
 def _infer_exposure_from_parameters(data_frame):
-    """ infer the exposures from measurement parameters
+    """infer the exposures from measurement parameters
 
     will raise a ValueError if the parameters contain NaNs
     """
@@ -42,7 +42,7 @@ def _infer_exposure_from_parameters(data_frame):
 
 
 def apply_exposure_map(data_frame, exposure_map=None):
-    """ applies the parameters of a exposure map to the data frame
+    """applies the parameters of a exposure map to the data frame
 
     exposure map:
         keys: must be the same as the exposure ids,
@@ -124,7 +124,7 @@ def _infer_normalization_map(split_data_frames):
 
 
 def normalize_exposure_time(split_data_frames):
-    """ add time normalized values to the split data frames
+    """add time normalized values to the split data frames
 
     The max exposure time per channel is used for normalization.
     """
@@ -153,7 +153,7 @@ def normalize_measurement(
     overflow_column=COL_NAME_SPOT_MEAN,
     overflow_limit=0.5,
 ):
-    """ augment normalize the measurement exposures
+    """augment normalize the measurement exposures
 
     exposure map:
         keys: must be the same as the exposure ids,
diff --git a/tests/test_normailsation.py b/tests/test_normailsation.py
index c688692..302f1f6 100644
--- a/tests/test_normailsation.py
+++ b/tests/test_normailsation.py
@@ -127,8 +127,8 @@ def test_check_overflow_limit_custom_limit():
 
 def test_reduce_overflow_in_channel(normalization_data_frame):
     from sensospot_data.normalisation import (
-        _reduce_overflow_in_channel,
         _check_overflow_limit,
+        _reduce_overflow_in_channel,
     )
 
     data_frame = _check_overflow_limit(
@@ -158,8 +158,8 @@ def test_reduce_overflow_in_channel(normalization_data_frame):
 
 def test_reduce_overflow_in_channel_shortcut(normalization_data_frame):
     from sensospot_data.normalisation import (
-        _reduce_overflow_in_channel,
         _check_overflow_limit,
+        _reduce_overflow_in_channel,
     )
 
     normalization_data_frame["Exposure.Time"] = 1
@@ -201,8 +201,8 @@ def test_reduce_overflow(normalization_data_frame):
 
 def test_infer_normalization_map(normalization_data_frame):
     from sensospot_data.normalisation import (
-        _infer_normalization_map,
         _split_data_frame,
+        _infer_normalization_map,
     )
 
     normalization_data_frame.loc[5, "Exposure.Channel"] = "Cy3"
@@ -216,11 +216,11 @@
 
 
 def test_normalize_exposure(normalization_data_frame):
+    from sensospot_data.columns import COLUMN_NORMALIZATION
     from sensospot_data.normalisation import (
-        _normalize_exposure,
         reduce_overflow,
+        _normalize_exposure,
     )
-    from sensospot_data.columns import COLUMN_NORMALIZATION
 
     reduced = reduce_overflow(normalization_data_frame, "Saturation", 1)
     result = _normalize_exposure(reduced["Cy5"], 100)
@@ -236,8 +236,8 @@
 
 def test_normalize_exposure_time(normalization_data_frame):
     from sensospot_data.normalisation import (
-        normalize_exposure_time,
         reduce_overflow,
+        normalize_exposure_time,
     )
 
     reduced = reduce_overflow(normalization_data_frame, "Saturation", 1)
@@ -255,8 +255,8 @@
 
 def test_normalize_exposure_time_infered_map(normalization_data_frame):
     from sensospot_data.normalisation import (
-        normalize_exposure_time,
         reduce_overflow,
+        normalize_exposure_time,
     )
 
     reduced = reduce_overflow(normalization_data_frame, "Saturation", 1)
@@ -273,8 +273,8 @@
 
 
 def test_normalize_measurement(example_dir):
-    from sensospot_data.normalisation import normalize_measurement
     from sensospot_data.parser import process_folder
+    from sensospot_data.normalisation import normalize_measurement
 
     sub_dir = example_dir / EXAMPLE_DIR_WITH_PARAMS
     data_frame = process_folder(sub_dir)
diff --git a/tests/test_parameters.py b/tests/test_parameters.py
index 460847c..86571c0 100644
--- a/tests/test_parameters.py
+++ b/tests/test_parameters.py
@@ -34,8 +34,8 @@ def test_ssearch_measurement_params_file_parameters_file(tmpdir):
 
 def test_parse_channel_info(example_dir):
     from sensospot_data.parameters import (
-        _search_measurement_params_file,
         _parse_measurement_params,
+        _search_measurement_params_file,
     )
 
     params = _search_measurement_params_file(
@@ -70,8 +70,8 @@ def test_get_measurement_params_file_not_found(example_dir):
 
 def test_add_measurement_params(exposure_df):
     from sensospot_data.parameters import (
-        _add_measurement_params,
         MeasurementParams,
+        _add_measurement_params,
     )
 
     params = {
diff --git a/tests/test_parser.py b/tests/test_parser.py
index f5a02b0..01d4921 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -68,9 +68,10 @@ def test_parse_csv_no_array(example_dir):
     "input, expected", [("", "."), ("..,", "."), (".,,", ","), ("..,,", ".")]
 )
 def test_guess_decimal_separator_returns_correct_separator(input, expected):
-    from sensospot_data.parser import _guess_decimal_separator
     from io import StringIO
 
+    from sensospot_data.parser import _guess_decimal_separator
+
     handle = StringIO(f"header\n{input}\n")
     result = _guess_decimal_separator(handle)
 
@@ -78,9 +79,10 @@
 
 
 def test_guess_decimal_separator_rewinds_handle():
-    from sensospot_data.parser import _guess_decimal_separator
     from io import StringIO
 
+    from sensospot_data.parser import _guess_decimal_separator
+
     handle = StringIO("\n".join(["header", "data_line"]))
     _guess_decimal_separator(handle)
 
@@ -126,9 +128,10 @@ def test_extract_measurement_info_raises_error(filename):
 
 
 def test_cleanup_data_columns():
-    from sensospot_data.parser import _cleanup_data_columns
     from pandas import DataFrame
 
+    from sensospot_data.parser import _cleanup_data_columns
+
     columns = ["Rect.", "Contour", " ID ", "Found", "Dia."]
     data = {col: [i] for i, col in enumerate(columns)}
     data_frame = DataFrame(data=data)
@@ -274,8 +277,8 @@ def test_sanity_check_raises_value_error(example_dir):
 
 
 def test_get_cache_table_name():
-    from sensospot_data.parser import _get_cache_table_name
     from sensospot_data import VERSION_TABLE_NAME
+    from sensospot_data.parser import _get_cache_table_name
 
     result = _get_cache_table_name()
 
@@ -283,7 +286,7 @@
 
 
 def test_process_folder_creates_cache(dir_for_caching):
-    from sensospot_data.parser import process_folder, CACHE_FILE_NAME
+    from sensospot_data.parser import CACHE_FILE_NAME, process_folder
 
     cache_path = dir_for_caching / CACHE_FILE_NAME
     assert not cache_path.is_file()
@@ -309,7 +312,7 @@ def test_process_folder_reads_from_cache(dir_for_caching, example_file):
 def test_process_folder_read_cache_fails_silently(
     dir_for_caching, exposure_df
 ):
-    from sensospot_data.parser import process_folder, CACHE_FILE_NAME
+    from sensospot_data.parser import CACHE_FILE_NAME, process_folder
 
     cache_path = dir_for_caching / CACHE_FILE_NAME
     exposure_df.to_hdf(cache_path, "unknown table")
@@ -321,9 +324,9 @@
 
 def test_process_folder_read_cache_no_cache_arg(dir_for_caching, exposure_df):
     from sensospot_data.parser import (
+        CACHE_FILE_NAME,
         process_folder,
         _get_cache_table_name,
-        CACHE_FILE_NAME,
     )
     cache_path = dir_for_caching / CACHE_FILE_NAME
     exposure_df.to_hdf(cache_path, _get_cache_table_name())
@@ -335,7 +338,7 @@
 
 
 def test_process_folder_writes_cache(dir_for_caching):
-    from sensospot_data.parser import process_folder, CACHE_FILE_NAME
+    from sensospot_data.parser import CACHE_FILE_NAME, process_folder
 
     process_folder(dir_for_caching, use_cache=True)
 
@@ -344,7 +347,7 @@
 
 
 def test_process_folder_writes_cache_no_cache_arg(dir_for_caching):
-    from sensospot_data.parser import process_folder, CACHE_FILE_NAME
+    from sensospot_data.parser import CACHE_FILE_NAME, process_folder
 
     process_folder(dir_for_caching, use_cache=False)
 
diff --git a/tests/test_sensovation_data_parser.py b/tests/test_sensovation_data_parser.py
index c348d13..8fe7e0d 100644
--- a/tests/test_sensovation_data_parser.py
+++ b/tests/test_sensovation_data_parser.py
@@ -5,6 +5,6 @@ def test_import_api():
     from sensospot_data import CACHE_FILE_NAME  # noqa: F401
     from sensospot_data import parse_file  # noqa: F401
     from sensospot_data import parse_folder  # noqa: F401
-    from sensospot_data import parse_multiple_files  # noqa: F401
     from sensospot_data import process_folder  # noqa: F401
+    from sensospot_data import parse_multiple_files  # noqa: F401
     from sensospot_data import normalize_measurement  # noqa: F401