#!/usr/bin/env python
# coding: utf-8
# requires at least attrs version == 21.4
import argparse
import logging
import sys
from pathlib import Path
from sys import platform

from datamerge.readersandwriters import (
    SDOListFromFiles,
    readConfigObjFromYaml,
    mergeConfigObjFromYaml,
    outputToNX,
)
from datamerge.mergecore import mergeCore
from datamerge.plotting import plotFigure

def isMac() -> bool:
    return platform == "darwin"


def filelistFromArgs(argDict: dict) -> list:
    """
    Takes the parsed command-line argument dictionary
    and returns the list of filenames to merge.
    """
    fnames = argDict["dataFiles"]
    if len(fnames) == 1 and fnames[0].is_dir():
        # a single directory was given: glob the files matching globKey inside it
        fnames = sorted(fnames[0].glob(argDict["globKey"]))
    logging.info(f"Found the following files to merge: {fnames}")
    assert len(fnames) > 0, "length of filename list to merge is zero, cannot merge."
    assert isinstance(fnames, list)
    return fnames
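
# Example (hypothetical path): if a single directory is passed via --dataFiles,
# the files are collected with the --globKey pattern, e.g.
#   filelistFromArgs({"dataFiles": [Path("./processed")], "globKey": "*processed.nxs"})
# returns the sorted list of files matching *processed.nxs inside ./processed.
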
def configureParser() -> argparse.ArgumentParser:
    # process input arguments
    parser = argparse.ArgumentParser(
        description="""
        Runs a datamerge binning/rebinning operation from the command line for processed MOUSE data.
        For this to work, you need to have YAML-formatted configuration files ready.
        Examples of these configuration files are provided in the examples subdirectory.
        Released under a GPLv3+ license.
        """
    )
    # TODO: add info about output files to be created ...
    parser.add_argument(
        "-f",
        "--dataFiles",
        type=lambda p: Path(p).absolute(),
        default=Path(__file__).absolute().parent / "testdata" / "quickstartdemo1.csv",
        help="Path(s) to the files with the SAXS data. If a single directory is given, files matching --globKey are globbed from it",
        nargs="+",
        required=True,
    )
    parser.add_argument(
        "-g",
        "--globKey",
        type=str,
        default="*.nxs",
        help="If the filename path is a directory, this glob pattern is used to find the files to merge",
        # required=True,
    )
    parser.add_argument(
        "-o",
        "--outputFile",
        type=lambda p: Path(p).absolute(),
        default=Path(__file__).absolute().parent / "test.nxs",
        help="Path to the file to store the datamerge result in",
        # required=True,
    )
    parser.add_argument(
        "-C",
        "--configFile",
        type=lambda p: Path(p).absolute(),
        default=Path(__file__).absolute().parent / "defaults" / "mergeConfig.yaml",
        help="Path to the datamerge configuration (YAML) file",
        # required=True,
    )
    parser.add_argument(
        "-r",
        "--raiseFileReadWarning",
        default=False,
        action="store_true",
        help="If there is a problem reading a datafile, raise an error instead of skipping it",
        # required=True,
    )
    parser.add_argument(
        "-w",
        "--writeOriginalData",
        default=False,
        action="store_true",
        help="If set, the original read-in data are added to the output file structure",
        # required=True,
    )
    return parser
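
# Example invocation (hypothetical file and directory names), using the flags defined above:
#   python main.py -f ./processed_data -g "*processed.nxs" -C defaults/mergeConfig.yaml -o merged.nxs -w
# Adding -r would raise an error on unreadable data files instead of skipping them.
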
if __name__ == "__main__":
    parser = configureParser()
    args = parser.parse_args()
    # initiate logging (to console stdout for now)
    logging.basicConfig(level=logging.INFO, stream=sys.stdout)
    adict = vars(args)
    try:
        dataList = SDOListFromFiles(
            filelistFromArgs(adict),
            readConfig=readConfigObjFromYaml(adict["configFile"]),
        )
    except KeyError:
        logging.warning(
            f"The nexus files do not contain fully processed data, skipping. \n used settings: {adict}"
        )
        if adict["raiseFileReadWarning"]:
            raise
        else:
            sys.exit(0)

    m = mergeCore(
        mergeConfig=mergeConfigObjFromYaml(adict["configFile"]),
        dataList=dataList,
    )
    filteredMDO = m.run()

    # export to the final output file
    ofname = Path(adict["outputFile"])
    logging.debug(f"Storing result in output file {ofname}")
    outputToNX(
        ofname=ofname,
        mco=m.mergeConfig,
        mdo=filteredMDO,
        rangeList=m.ranges,
        writeOriginalData=adict["writeOriginalData"],
    )

    # make the plots
    plotFigure(m, ofname=ofname)