Changeset 2387abc in sasview for src/sas/sascalc
- Timestamp:
- Aug 19, 2016 11:02:55 AM (8 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 6f343e3
- Parents:
- 32c5983 (diff), 45d7662 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent. - Location:
- src/sas/sascalc
- Files:
-
- 5 added
- 1 deleted
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/data_info.py
rd72567e r45d7662 445 445 return len(self.name) == 0 and len(self.date) == 0 and len(self.description) == 0 \ 446 446 and len(self.term) == 0 and len(self.notes) == 0 447 447 448 448 def single_line_desc(self): 449 449 """ … … 451 451 """ 452 452 return "%s %s %s" % (self.name, self.date, self.description) 453 453 454 454 def __str__(self): 455 455 _str = "Process:\n" … … 1037 1037 _str += " Z-axis: %s\t[%s]\n" % (self._zaxis, self._zunit) 1038 1038 _str += " Length: %g \n" % (len(self.data)) 1039 _str += " Shape: (%d, %d)\n" % (len(self.y_bins), len(self.x_bins)) 1039 1040 return _str 1040 1041 … … 1220 1221 result.mask = numpy.append(self.mask, other.mask) 1221 1222 if result.err_data is not None: 1222 result.err_data = numpy.append(self.err_data, other.err_data) 1223 result.err_data = numpy.append(self.err_data, other.err_data) 1223 1224 if self.dqx_data is not None: 1224 1225 result.dqx_data = numpy.append(self.dqx_data, other.dqx_data) … … 1252 1253 final_dataset.yaxis(data._yaxis, data._yunit) 1253 1254 final_dataset.zaxis(data._zaxis, data._zunit) 1255 final_dataset.x_bins = data.x_bins 1256 final_dataset.y_bins = data.y_bins 1254 1257 else: 1255 1258 return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \ -
src/sas/sascalc/dataloader/readers/cansas_constants.py
rd398285 r250fec92 27 27 return_me.current_level = self.CANSAS_FORMAT.get("SASentry") 28 28 # Defaults for variable and datatype 29 return_me.ns_variable = "{0}.meta_data[\"{2}\"] = \"{1}\""30 29 return_me.ns_datatype = "content" 31 30 return_me.ns_optional = True … … 38 37 return_me.current_level = \ 39 38 return_me.current_level.get("<any>", "") 40 cl_variable = return_me.current_level.get("variable", "")41 39 cl_datatype = return_me.current_level.get("storeas", "") 42 40 cl_units_optional = \ 43 return_me.current_level.get("units_ required", "")41 return_me.current_level.get("units_optional", "") 44 42 # Where are how to store the variable for the given 45 43 # namespace CANSAS_CONSTANTS tree is hierarchical, so 46 44 # is no value, inherit 47 return_me.ns_variable = cl_variable if cl_variable != "" \48 else return_me.ns_variable49 45 return_me.ns_datatype = cl_datatype if cl_datatype != "" \ 50 46 else return_me.ns_datatype … … 53 49 else return_me.ns_optional 54 50 except AttributeError: 55 return_me.ns_variable = "{0}.meta_data[\"{2}\"] = \"{1}\""56 51 return_me.ns_datatype = "content" 57 52 return_me.ns_optional = True … … 75 70 # The constants below hold information on where to store the CanSAS data 76 71 # when loaded in using sasview 77 META_DATA = "{0}.meta_data[\"{2}\"] = \"{1}\"" 78 ANY = {"variable" : "{0}.meta_data[\"{2}\"] = \'{1}\'", 79 "storeas" : "content", 80 } 81 TITLE = {"variable" : "{0}.title = \"{1}\""} 82 SASNOTE = {"variable" : "{0}.notes.append(\'{1}\')"} 83 SASPROCESS_TERM = {"variable" : None, 84 "attributes" : {"unit" : {"variable" : None}, 85 "name" : {"variable" : None} 86 } 87 } 88 SASPROCESS_SASPROCESSNOTE = {"variable" : None, 89 "children" : {"<any>" : ANY} 90 } 91 SASPROCESS = {"variable" : None, 92 "children" : {"name" : {"variable" : "{0}.name = \'{1}\'"}, 93 "date" : {"variable" : "{0}.date = \'{1}\'"}, 94 "description" : 95 {"variable" : "{0}.description = \'{1}\'"}, 72 ANY = {"storeas" : "content"} 73 TITLE = {} 74 SASNOTE 
= {} 75 SASPROCESS_TERM = {"attributes" : {"unit" : {}, "name" : {}}} 76 SASPROCESS_SASPROCESSNOTE = {"children" : {"<any>" : ANY}} 77 SASPROCESS = {"children" : {"name" : {}, 78 "date" : {}, 79 "description" : {}, 96 80 "term" : SASPROCESS_TERM, 97 81 "SASprocessnote" : SASPROCESS_SASPROCESSNOTE, … … 99 83 }, 100 84 } 101 RUN = {"variable" : "{0}.run.append(\"{1}\")", 102 "attributes" : {"name" : 103 {"variable" : "{0}.run_name[\"{3}\"] = \"{1}\""}} 104 } 105 SASDATA_IDATA_Q = {"variable" : "{0}.x = numpy.append({0}.x, {1})", 106 "unit" : "x_unit", 107 "attributes" : {"unit" : 108 {"variable" : "{0}.xaxis(\"Q\", \'{1}\')", 109 "storeas" : "content" 110 } 111 }, 112 } 113 SASDATA_IDATA_I = {"variable" : "{0}.y = numpy.append({0}.y, {1})", 114 "unit" : "y_unit", 115 "attributes" : {"unit" : 116 {"variable" : "{0}.yaxis(\"Intensity\", \'{1}\')", 117 "storeas" : "content" 118 } 119 }, 120 } 121 SASDATA_IDATA_IDEV = {"variable" : "{0}.dy = numpy.append({0}.dy, {1})", 85 RUN = {"attributes" : {"name" :{}}} 86 SASDATA_IDATA_Q = {"units_optional" : False, 87 "storeas" : "float", 88 "unit" : "x_unit", 89 "attributes" : {"unit" : {"storeas" : "content"}}, 90 } 91 SASDATA_IDATA_I = {"units_optional" : False, 92 "storeas" : "float", 93 "unit" : "y_unit", 94 "attributes" : {"unit" : {"storeas" : "content"}}, 95 } 96 SASDATA_IDATA_IDEV = {"units_optional" : False, 97 "storeas" : "float", 122 98 "unit" : "y_unit", 123 "attributes" : {"unit" : 124 {"variable" : META_DATA, 125 "storeas" : "content" 126 } 127 }, 128 } 129 SASDATA_IDATA_QDEV = {"variable" : "{0}.dx = numpy.append({0}.dx, {1})", 99 "attributes" : {"unit" : {"storeas" : "content"}}, 100 } 101 SASDATA_IDATA_QDEV = {"units_optional" : False, 102 "storeas" : "float", 130 103 "unit" : "x_unit", 131 "attributes" : {"unit" : 132 {"variable" : META_DATA, 133 "storeas" : "content" 134 } 135 }, 136 } 137 SASDATA_IDATA_DQL = {"variable" : "{0}.dxl = numpy.append({0}.dxl, {1})", 104 "attributes" : {"unit" : {"storeas" : 
"content"}}, 105 } 106 SASDATA_IDATA_DQL = {"units_optional" : False, 107 "storeas" : "float", 138 108 "unit" : "x_unit", 139 "attributes" : {"unit" : 140 {"variable" : META_DATA, 141 "storeas" : "content" 142 } 143 }, 144 } 145 SASDATA_IDATA_DQW = {"variable" : "{0}.dxw = numpy.append({0}.dxw, {1})", 109 "attributes" : {"unit" : {"storeas" : "content"}}, 110 } 111 SASDATA_IDATA_DQW = {"units_optional" : False, 112 "storeas" : "float", 146 113 "unit" : "x_unit", 147 "attributes" : {"unit" : 148 {"variable" : META_DATA, 149 "storeas" : "content" 150 } 151 }, 152 } 153 SASDATA_IDATA_QMEAN = {"storeas" : "content", 154 "unit" : "x_unit", 155 "variable" : META_DATA, 156 "attributes" : {"unit" : {"variable" : META_DATA}}, 114 "attributes" : {"unit" : {"storeas" : "content"}}, 115 } 116 SASDATA_IDATA_QMEAN = {"unit" : "x_unit", 117 "attributes" : {"unit" : {}}, 157 118 } 158 SASDATA_IDATA_SHADOWFACTOR = {"variable" : META_DATA, 159 "storeas" : "content", 160 } 161 SASDATA_IDATA = {"storeas" : "float", 162 "units_optional" : False, 163 "variable" : None, 164 "attributes" : {"name" : {"variable" : META_DATA, 165 "storeas" : "content", 166 }, 167 "timestamp" : {"variable" : META_DATA, 168 "storeas" : "timestamp", 169 } 170 }, 119 SASDATA_IDATA_SHADOWFACTOR = {} 120 SASDATA_IDATA = {"attributes" : {"name" : {},"timestamp" : {"storeas" : "timestamp"}}, 171 121 "children" : {"Q" : SASDATA_IDATA_Q, 172 122 "I" : SASDATA_IDATA_I, … … 180 130 } 181 131 } 182 SASDATA = {"attributes" : {"name" : { "variable" : META_DATA,}},132 SASDATA = {"attributes" : {"name" : {}}, 183 133 "variable" : None, 184 134 "children" : {"Idata" : SASDATA_IDATA, … … 186 136 } 187 137 } 188 SASTRANSSPEC_TDATA_LAMDBA = {" variable" : "{0}.wavelength.append({1})",138 SASTRANSSPEC_TDATA_LAMDBA = {"storeas" : "float", 189 139 "unit" : "wavelength_unit", 190 "attributes" : 191 {"unit" : 192 {"variable" : "{0}.wavelength_unit = \"{1}\"", 193 "storeas" : "content" 194 } 195 } 140 "attributes" : {"unit" : 
{"storeas" : "content"}} 196 141 } 197 SASTRANSSPEC_TDATA_T = {" variable" : "{0}.transmission.append({1})",142 SASTRANSSPEC_TDATA_T = {"storeas" : "float", 198 143 "unit" : "transmission_unit", 199 "attributes" : 200 {"unit" : 201 {"variable" : "{0}.transmission_unit = \"{1}\"", 202 "storeas" : "content" 203 } 204 } 205 } 206 SASTRANSSPEC_TDATA_TDEV = {"variable" : 207 "{0}.transmission_deviation.append({1})", 144 "attributes" : {"unit" : {"storeas" : "content"}} 145 } 146 SASTRANSSPEC_TDATA_TDEV = {"storeas" : "float", 208 147 "unit" : "transmission_deviation_unit", 209 "attributes" : 210 {"unit" : 211 {"variable" : 212 "{0}.transmission_deviation_unit = \"{1}\"", 213 "storeas" : "content" 214 } 215 } 216 } 217 SASTRANSSPEC_TDATA = {"storeas" : "float", 218 "variable" : None, 219 "children" : {"Lambda" : SASTRANSSPEC_TDATA_LAMDBA, 148 "attributes" : {"unit" :{"storeas" : "content"}} 149 } 150 SASTRANSSPEC_TDATA = {"children" : {"Lambda" : SASTRANSSPEC_TDATA_LAMDBA, 220 151 "T" : SASTRANSSPEC_TDATA_T, 221 152 "Tdev" : SASTRANSSPEC_TDATA_TDEV, … … 223 154 } 224 155 } 225 SASTRANSSPEC = {"variable" : None, 226 "children" : {"Tdata" : SASTRANSSPEC_TDATA, 156 SASTRANSSPEC = {"children" : {"Tdata" : SASTRANSSPEC_TDATA, 227 157 "<any>" : ANY, 228 158 }, 229 "attributes" : 230 {"name" : 231 {"variable" : "{0}.name = \"{1}\""}, 232 "timestamp" : 233 {"variable" : "{0}.timestamp = \"{1}\""}, 234 } 159 "attributes" : {"name" :{}, "timestamp" : {},} 235 160 } 236 SASSAMPLE_THICK = {"variable" : "{0}.sample.thickness = {1}", 237 "unit" : "sample.thickness_unit", 238 "storeas" : "float", 239 "attributes" : 240 {"unit" : 241 {"variable" : "{0}.sample.thickness_unit = \"{1}\"", 242 "storeas" : "content" 243 } 244 }, 245 } 246 SASSAMPLE_TRANS = {"variable" : "{0}.sample.transmission = {1}", 247 "storeas" : "float", 248 } 249 SASSAMPLE_TEMP = {"variable" : "{0}.sample.temperature = {1}", 250 "unit" : "sample.temperature_unit", 161 SASSAMPLE_THICK = {"unit" : "thickness_unit", 162 
"storeas" : "float", 163 "attributes" : {"unit" :{}}, 164 } 165 SASSAMPLE_TRANS = {"storeas" : "float",} 166 SASSAMPLE_TEMP = {"unit" : "temperature_unit", 251 167 "storeas" : "float", 252 "attributes" : 253 {"unit" : 254 {"variable" : "{0}.sample.temperature_unit = \"{1}\"", 255 "storeas" : "content" 256 } 257 }, 168 "attributes" :{"unit" :{}}, 258 169 } 259 SASSAMPLE_POS_ATTR = {"unit" : {"variable" : 260 "{0}.sample.position_unit = \"{1}\"", 261 "storeas" : "content" 262 } 263 } 264 SASSAMPLE_POS_X = {"variable" : "{0}.sample.position.x = {1}", 265 "unit" : "sample.position_unit", 170 SASSAMPLE_POS_ATTR = {"unit" : {}} 171 SASSAMPLE_POS_X = {"unit" : "position_unit", 266 172 "storeas" : "float", 267 173 "attributes" : SASSAMPLE_POS_ATTR 268 174 } 269 SASSAMPLE_POS_Y = {"variable" : "{0}.sample.position.y = {1}", 270 "unit" : "sample.position_unit", 175 SASSAMPLE_POS_Y = {"unit" : "position_unit", 271 176 "storeas" : "float", 272 177 "attributes" : SASSAMPLE_POS_ATTR 273 178 } 274 SASSAMPLE_POS_Z = {"variable" : "{0}.sample.position.z = {1}", 275 "unit" : "sample.position_unit", 179 SASSAMPLE_POS_Z = {"unit" : "position_unit", 276 180 "storeas" : "float", 277 181 "attributes" : SASSAMPLE_POS_ATTR 278 182 } 279 SASSAMPLE_POS = {"children" : {"variable" : None, 280 "x" : SASSAMPLE_POS_X, 183 SASSAMPLE_POS = {"children" : {"x" : SASSAMPLE_POS_X, 281 184 "y" : SASSAMPLE_POS_Y, 282 185 "z" : SASSAMPLE_POS_Z, 283 186 }, 284 187 } 285 SASSAMPLE_ORIENT_ATTR = {"unit" : 286 {"variable" : 287 "{0}.sample.orientation_unit = \"{1}\"", 288 "storeas" : "content" 289 } 290 } 291 SASSAMPLE_ORIENT_ROLL = {"variable" : "{0}.sample.orientation.x = {1}", 292 "unit" : "sample.orientation_unit", 188 SASSAMPLE_ORIENT_ATTR = {"unit" :{}} 189 SASSAMPLE_ORIENT_ROLL = {"unit" : "orientation_unit", 293 190 "storeas" : "float", 294 191 "attributes" : SASSAMPLE_ORIENT_ATTR 295 192 } 296 SASSAMPLE_ORIENT_PITCH = {"variable" : "{0}.sample.orientation.y = {1}", 297 "unit" : 
"sample.orientation_unit", 193 SASSAMPLE_ORIENT_PITCH = {"unit" : "orientation_unit", 298 194 "storeas" : "float", 299 195 "attributes" : SASSAMPLE_ORIENT_ATTR 300 196 } 301 SASSAMPLE_ORIENT_YAW = {"variable" : "{0}.sample.orientation.z = {1}", 302 "unit" : "sample.orientation_unit", 197 SASSAMPLE_ORIENT_YAW = {"unit" : "orientation_unit", 303 198 "storeas" : "float", 304 199 "attributes" : SASSAMPLE_ORIENT_ATTR 305 200 } 306 SASSAMPLE_ORIENT = {"variable" : None, 307 "children" : {"roll" : SASSAMPLE_ORIENT_ROLL, 201 SASSAMPLE_ORIENT = {"children" : {"roll" : SASSAMPLE_ORIENT_ROLL, 308 202 "pitch" : SASSAMPLE_ORIENT_PITCH, 309 203 "yaw" : SASSAMPLE_ORIENT_YAW, … … 311 205 } 312 206 SASSAMPLE = {"attributes" : 313 {"name" : {"variable" : "{0}.sample.name = \"{1}\""},}, 314 "variable" : None, 315 "children" : {"ID" : {"variable" : "{0}.sample.ID = \"{1}\""}, 207 {"name" : {},}, 208 "children" : {"ID" : {}, 316 209 "thickness" : SASSAMPLE_THICK, 317 210 "transmission" : SASSAMPLE_TRANS, … … 319 212 "position" : SASSAMPLE_POS, 320 213 "orientation" : SASSAMPLE_ORIENT, 321 "details" : 322 {"variable" : 323 "{0}.sample.details.append(\"{1}\")"}, 214 "details" : {}, 324 215 "<any>" : ANY 325 216 }, 326 217 } 327 SASINSTR_SRC_BEAMSIZE_ATTR = {"unit" : 328 "{0}.source.beam_size_unit = \"{1}\"", 329 "storeas" : "content" 330 } 331 SASINSTR_SRC_BEAMSIZE_X = {"variable" : "{0}.source.beam_size.x = {1}", 332 "unit" : "source.beam_size_unit", 218 SASINSTR_SRC_BEAMSIZE_ATTR = {"unit" : ""} 219 SASINSTR_SRC_BEAMSIZE_X = {"unit" : "beam_size_unit", 333 220 "storeas" : "float", 334 221 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 335 222 } 336 SASINSTR_SRC_BEAMSIZE_Y = {"variable" : "{0}.source.beam_size.y = {1}", 337 "unit" : "source.beam_size_unit", 223 SASINSTR_SRC_BEAMSIZE_Y = {"unit" : "beam_size_unit", 338 224 "storeas" : "float", 339 225 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 340 226 } 341 SASINSTR_SRC_BEAMSIZE_Z = {"variable" : "{0}.source.beam_size.z = {1}", 342 "unit" : 
"source.beam_size_unit", 227 SASINSTR_SRC_BEAMSIZE_Z = {"unit" : "beam_size_unit", 343 228 "storeas" : "float", 344 229 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 345 230 } 346 SASINSTR_SRC_BEAMSIZE = {"attributes" : 347 {"name" : {"variable" : 348 "{0}.source.beam_size_name = \"{1}\""}}, 349 "variable" : None, 231 SASINSTR_SRC_BEAMSIZE = {"attributes" : {"name" : {}}, 350 232 "children" : {"x" : SASINSTR_SRC_BEAMSIZE_X, 351 233 "y" : SASINSTR_SRC_BEAMSIZE_Y, … … 353 235 } 354 236 } 355 SASINSTR_SRC_WL = {"variable" : "{0}.source.wavelength = {1}", 356 "unit" : "source.wavelength_unit", 357 "storeas" : "float", 358 "attributes" : 359 {"unit" : 360 {"variable" : "{0}.source.wavelength_unit = \"{1}\"", 361 "storeas" : "content" 362 }, 237 SASINSTR_SRC_WL = {"unit" : "wavelength_unit", 238 "storeas" : "float", 239 "attributes" : {"unit" :{}, 363 240 } 364 241 } 365 SASINSTR_SRC_WL_MIN = {"variable" : "{0}.source.wavelength_min = {1}", 366 "unit" : "source.wavelength_min_unit", 242 SASINSTR_SRC_WL_MIN = {"unit" : "wavelength_min_unit", 367 243 "storeas" : "float", 368 "attributes" : 369 {"unit" : 370 {"variable" : 371 "{0}.source.wavelength_min_unit = \"{1}\"", 372 "storeas" : "content" 373 }, 374 } 244 "attributes" : {"unit" :{"storeas" : "content"},} 375 245 } 376 SASINSTR_SRC_WL_MAX = {"variable" : "{0}.source.wavelength_max = {1}", 377 "unit" : "source.wavelength_max_unit", 246 SASINSTR_SRC_WL_MAX = {"unit" : "wavelength_max_unit", 378 247 "storeas" : "float", 379 "attributes" : 380 {"unit" : 381 {"variable" : 382 "{0}.source.wavelength_max_unit = \"{1}\"", 383 "storeas" : "content" 384 }, 385 } 248 "attributes" : {"unit" :{"storeas" : "content"},} 386 249 } 387 SASINSTR_SRC_WL_SPR = {"variable" : "{0}.source.wavelength_spread = {1}", 388 "unit" : "source.wavelength_spread_unit", 250 SASINSTR_SRC_WL_SPR = {"unit" : "wavelength_spread_unit", 389 251 "storeas" : "float", 390 "attributes" : 391 {"unit" : 392 {"variable" : 393 "{0}.source.wavelength_spread_unit = 
\"{1}\"", 394 "storeas" : "content" 395 }, 396 } 252 "attributes" : {"unit" : {"storeas" : "content"},} 397 253 } 398 SASINSTR_SRC = {"attributes" : {"name" : {"variable" : 399 "{0}.source.name = \"{1}\""}}, 400 "variable" : None, 401 "children" : {"radiation" : 402 {"variable" : 403 "{0}.source.radiation = \"{1}\""}, 254 SASINSTR_SRC = {"attributes" : {"name" : {}}, 255 "children" : {"radiation" : {}, 404 256 "beam_size" : SASINSTR_SRC_BEAMSIZE, 405 "beam_shape" : 406 {"variable" : 407 "{0}.source.beam_shape = \"{1}\""}, 257 "beam_shape" : {}, 408 258 "wavelength" : SASINSTR_SRC_WL, 409 259 "wavelength_min" : SASINSTR_SRC_WL_MIN, … … 412 262 }, 413 263 } 414 SASINSTR_COLL_APER_ATTR = {"unit" : {"variable" : "{0}.size_unit = \"{1}\"", 415 "storeas" : "content" 416 }, 417 } 418 SASINSTR_COLL_APER_X = {"variable" : "{0}.size.x = {1}", 419 "unit" : "size_unit", 264 SASINSTR_COLL_APER_ATTR = {"unit" : {}} 265 SASINSTR_COLL_APER_X = {"unit" : "size_unit", 420 266 "storeas" : "float", 421 267 "attributes" : SASINSTR_COLL_APER_ATTR 422 268 } 423 SASINSTR_COLL_APER_Y = {"variable" : "{0}.size.y = {1}", 424 "unit" : "size_unit", 269 SASINSTR_COLL_APER_Y = {"unit" : "size_unit", 425 270 "storeas" : "float", 426 271 "attributes" : SASINSTR_COLL_APER_ATTR 427 272 } 428 SASINSTR_COLL_APER_Z = {"variable" : "{0}.size.z = {1}", 429 "unit" : "size_unit", 273 SASINSTR_COLL_APER_Z = {"unit" : "size_unit", 430 274 "storeas" : "float", 431 275 "attributes" : SASINSTR_COLL_APER_ATTR 432 276 } 433 SASINSTR_COLL_APER_SIZE = {"attributes" : 434 {"unit" : {"variable" : 435 "{0}.size_unit = \"{1}\""}}, 277 SASINSTR_COLL_APER_SIZE = {"attributes" : {"unit" : {}}, 436 278 "children" : {"storeas" : "float", 437 279 "x" : SASINSTR_COLL_APER_X, … … 441 283 } 442 284 SASINSTR_COLL_APER_DIST = {"storeas" : "float", 443 "attributes" : 444 {"storeas" : "content", 445 "unit" : {"variable" : 446 "{0}.distance_unit = \"{1}\""} 447 }, 448 "variable" : "{0}.distance = {1}", 285 "attributes" : {"unit" : 
{}}, 449 286 "unit" : "distance_unit", 450 287 } 451 SASINSTR_COLL_APER = {"variable" : None, 452 "attributes" : {"name" : {"variable" : 453 "{0}.name = \"{1}\""}, 454 "type" : {"variable" : 455 "{0}.type = \"{1}\""}, 456 }, 288 SASINSTR_COLL_APER = {"attributes" : {"name" : {}, "type" : {}, }, 457 289 "children" : {"size" : SASINSTR_COLL_APER_SIZE, 458 290 "distance" : SASINSTR_COLL_APER_DIST 459 291 } 460 292 } 461 SASINSTR_COLL = {"attributes" : 462 {"name" : {"variable" : "{0}.name = \"{1}\""}}, 463 "variable" : None, 293 SASINSTR_COLL = {"attributes" : {"name" : {}}, 464 294 "children" : 465 {"length" : 466 {"variable" : "{0}.length = {1}", 467 "unit" : "length_unit", 468 "storeas" : "float", 469 "attributes" : 470 {"storeas" : "content", 471 "unit" : {"variable" : "{0}.length_unit = \"{1}\""} 472 }, 473 }, 474 "aperture" : SASINSTR_COLL_APER, 475 }, 295 {"length" : 296 {"unit" : "length_unit", 297 "storeas" : "float", 298 "attributes" : {"storeas" : "content", "unit" : {}}, 299 }, 300 "aperture" : SASINSTR_COLL_APER, 301 }, 476 302 } 477 SASINSTR_DET_SDD = {" variable" : "{0}.distance = {1}",303 SASINSTR_DET_SDD = {"storeas" : "float", 478 304 "unit" : "distance_unit", 479 "attributes" : 480 {"unit" : 481 {"variable" : "{0}.distance_unit = \"{1}\"", 482 "storeas" : "content" 483 } 484 }, 305 "attributes" : {"unit" :{}}, 485 306 } 486 SASINSTR_DET_OFF_ATTR = {"unit" : {"variable" : "{0}.offset_unit = \"{1}\"", 487 "storeas" : "content" 488 }, 489 } 490 SASINSTR_DET_OFF_X = {"variable" : "{0}.offset.x = {1}", 307 SASINSTR_DET_OFF_ATTR = {"unit" : {"storeas" : "content" }} 308 SASINSTR_DET_OFF_X = {"storeas" : "float", 491 309 "unit" : "offset_unit", 492 310 "attributes" : SASINSTR_DET_OFF_ATTR 493 311 } 494 SASINSTR_DET_OFF_Y = {" variable" : "{0}.offset.y = {1}",312 SASINSTR_DET_OFF_Y = {"storeas" : "float", 495 313 "unit" : "offset_unit", 496 314 "attributes" : SASINSTR_DET_OFF_ATTR 497 315 } 498 SASINSTR_DET_OFF_Z = {" variable" : "{0}.offset.z = {1}",316 
SASINSTR_DET_OFF_Z = {"storeas" : "float", 499 317 "unit" : "offset_unit", 500 318 "attributes" : SASINSTR_DET_OFF_ATTR 501 319 } 502 SASINSTR_DET_OFF = {"variable" : None, 503 "children" : {"x" : SASINSTR_DET_OFF_X, 320 SASINSTR_DET_OFF = {"children" : {"x" : SASINSTR_DET_OFF_X, 504 321 "y" : SASINSTR_DET_OFF_Y, 505 322 "z" : SASINSTR_DET_OFF_Z, 506 323 } 507 324 } 508 SASINSTR_DET_OR_ATTR = {"unit" : "{0}.orientation_unit = \"{1}\"", 509 "storeas" : "content" 510 } 511 SASINSTR_DET_OR_ROLL = {"variable" : "{0}.orientation.x = {1}", 325 SASINSTR_DET_OR_ATTR = {} 326 SASINSTR_DET_OR_ROLL = {"storeas" : "float", 512 327 "unit" : "orientation_unit", 513 328 "attributes" : SASINSTR_DET_OR_ATTR 514 329 } 515 SASINSTR_DET_OR_PITCH = {" variable" : "{0}.orientation.y = {1}",330 SASINSTR_DET_OR_PITCH = {"storeas" : "float", 516 331 "unit" : "orientation_unit", 517 332 "attributes" : SASINSTR_DET_OR_ATTR 518 333 } 519 SASINSTR_DET_OR_YAW = {" variable" : "{0}.orientation.z = {1}",334 SASINSTR_DET_OR_YAW = {"storeas" : "float", 520 335 "unit" : "orientation_unit", 521 336 "attributes" : SASINSTR_DET_OR_ATTR 522 337 } 523 SASINSTR_DET_OR = {"variable" : None, 524 "children" : {"roll" : SASINSTR_DET_OR_ROLL, 338 SASINSTR_DET_OR = {"children" : {"roll" : SASINSTR_DET_OR_ROLL, 525 339 "pitch" : SASINSTR_DET_OR_PITCH, 526 340 "yaw" : SASINSTR_DET_OR_YAW, 527 341 } 528 342 } 529 SASINSTR_DET_BC_X = {" variable" : "{0}.beam_center.x = {1}",343 SASINSTR_DET_BC_X = {"storeas" : "float", 530 344 "unit" : "beam_center_unit", 531 "attributes" : 532 {"unit" : "{0}.beam_center_unit = \"{1}\"", 533 "storeas" : "content" 534 } 535 } 536 SASINSTR_DET_BC_Y = {"variable" : "{0}.beam_center.y = {1}", 345 "attributes" : {"storeas" : "content"} 346 } 347 SASINSTR_DET_BC_Y = {"storeas" : "float", 537 348 "unit" : "beam_center_unit", 538 "attributes" : 539 {"unit" : "{0}.beam_center_unit = \"{1}\"", 540 "storeas" : "content" 541 } 542 } 543 SASINSTR_DET_BC_Z = {"variable" : "{0}.beam_center.z = 
{1}", 349 "attributes" : {"storeas" : "content"} 350 } 351 SASINSTR_DET_BC_Z = {"storeas" : "float", 544 352 "unit" : "beam_center_unit", 545 "attributes" : 546 {"unit" : "{0}.beam_center_unit = \"{1}\"", 547 "storeas" : "content" 548 } 549 } 550 SASINSTR_DET_BC = {"variable" : None, 551 "children" : {"x" : SASINSTR_DET_BC_X, 353 "attributes" : {"storeas" : "content"} 354 } 355 SASINSTR_DET_BC = {"children" : {"x" : SASINSTR_DET_BC_X, 552 356 "y" : SASINSTR_DET_BC_Y, 553 "z" : SASINSTR_DET_BC_Z, 554 } 555 } 556 SASINSTR_DET_PIXEL_X = {"variable" : "{0}.pixel_size.x = {1}", 357 "z" : SASINSTR_DET_BC_Z,} 358 } 359 SASINSTR_DET_PIXEL_X = {"storeas" : "float", 557 360 "unit" : "pixel_size_unit", 558 "attributes" : 559 {"unit" : "{0}.pixel_size_unit = \"{1}\"", 560 "storeas" : "content" 561 } 562 } 563 SASINSTR_DET_PIXEL_Y = {"variable" : "{0}.pixel_size.y = {1}", 361 "attributes" : {"storeas" : "content" } 362 } 363 SASINSTR_DET_PIXEL_Y = {"storeas" : "float", 564 364 "unit" : "pixel_size_unit", 565 "attributes" : 566 {"unit" : "{0}.pixel_size_unit = \"{1}\"", 567 "storeas" : "content" 568 } 569 } 570 SASINSTR_DET_PIXEL_Z = {"variable" : "{0}.pixel_size.z = {1}", 365 "attributes" : {"storeas" : "content"} 366 } 367 SASINSTR_DET_PIXEL_Z = {"storeas" : "float", 571 368 "unit" : "pixel_size_unit", 572 "attributes" : 573 {"unit" : "{0}.pixel_size_unit = \"{1}\"", 574 "storeas" : "content" 575 } 576 } 577 SASINSTR_DET_PIXEL = {"variable" : None, 578 "children" : {"x" : SASINSTR_DET_PIXEL_X, 369 "attributes" : {"storeas" : "content"} 370 } 371 SASINSTR_DET_PIXEL = {"children" : {"x" : SASINSTR_DET_PIXEL_X, 579 372 "y" : SASINSTR_DET_PIXEL_Y, 580 373 "z" : SASINSTR_DET_PIXEL_Z, 581 374 } 582 375 } 583 SASINSTR_DET_SLIT = {" variable" : "{0}.slit_length = {1}",376 SASINSTR_DET_SLIT = {"storeas" : "float", 584 377 "unit" : "slit_length_unit", 585 "attributes" : 586 {"unit" : 587 {"variable" : "{0}.slit_length_unit = \"{1}\"", 588 "storeas" : "content" 589 } 590 } 591 } 592 
SASINSTR_DET = {"storeas" : "float", 593 "variable" : None, 594 "attributes" : {"name" : 595 {"storeas" : "content", 596 "variable" : "{0}.name = \"{1}\"", 597 } 598 }, 599 "children" : {"name" : {"storeas" : "content", 600 "variable" : "{0}.name = \"{1}\"", 601 }, 378 "attributes" : {"unit" : {}} 379 } 380 SASINSTR_DET = {"attributes" : {"name" : {"storeas" : "content"}}, 381 "children" : {"name" : {"storeas" : "content"}, 602 382 "SDD" : SASINSTR_DET_SDD, 603 383 "offset" : SASINSTR_DET_OFF, … … 608 388 } 609 389 } 610 SASINSTR = {"variable" : None, 611 "children" : 612 {"variable" : None, 613 "name" : {"variable" : "{0}.instrument = \"{1}\""}, 390 SASINSTR = {"children" : 391 {"name" : {}, 614 392 "SASsource" : SASINSTR_SRC, 615 393 "SAScollimation" : SASINSTR_COLL, … … 619 397 CANSAS_FORMAT = {"SASentry" : 620 398 {"units_optional" : True, 621 "variable" : None,622 399 "storeas" : "content", 623 "attributes" : {"name" : 624 {"variable" : 625 "{0}.run_name[\"{3}\"] = \"{1}\""}}, 400 "attributes" : {"name" : {}}, 626 401 "children" : {"Title" : TITLE, 627 402 "Run" : RUN, … … 644 419 645 420 current_level = '' 646 ns_variable = ''647 421 ns_datatype = '' 648 422 ns_optional = True … … 650 424 def __init__(self): 651 425 self.current_level = {} 652 self.ns_variable = ''653 426 self.ns_datatype = "content" 654 427 self.ns_optional = True -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r83b6408 r5f26aa4 14 14 15 15 import logging 16 import numpy 16 import numpy as np 17 17 import os 18 18 import sys … … 20 20 import inspect 21 21 # For saving individual sections of data 22 from sas.sascalc.dataloader.data_info import Data1D 23 from sas.sascalc.dataloader.data_info import Collimation 24 from sas.sascalc.dataloader.data_info import TransmissionSpectrum 25 from sas.sascalc.dataloader.data_info import Detector 26 from sas.sascalc.dataloader.data_info import Process 27 from sas.sascalc.dataloader.data_info import Aperture 22 from sas.sascalc.dataloader.data_info import Data1D, DataInfo, plottable_1D 23 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector, Process, Aperture 24 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable as combine_data 28 25 import sas.sascalc.dataloader.readers.xml_reader as xml_reader 29 26 from sas.sascalc.dataloader.readers.xml_reader import XMLreader 30 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants 27 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants, CurrentLevel 31 28 32 29 # The following 2 imports *ARE* used. Do not remove either. … … 34 31 from xml.dom.minidom import parseString 35 32 36 ## TODO: Refactor to load multiple <SASData> as separate Data1D objects37 ## TODO: Refactor to allow invalid XML, but give a useful warning when loaded38 39 _ZERO = 1e-1640 33 PREPROCESS = "xmlpreprocess" 41 34 ENCODING = "encoding" 42 35 RUN_NAME_DEFAULT = "None" 36 INVALID_SCHEMA_PATH_1_1 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd" 37 INVALID_SCHEMA_PATH_1_0 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd" 38 INVALID_XML = "\n\nThe loaded xml file, {0} does not fully meet the CanSAS v1.x specification. 
SasView loaded " + \ 39 "as much of the data as possible.\n\n" 43 40 HAS_CONVERTER = True 44 41 try: … … 52 49 ALLOW_ALL = True 53 50 54 # DO NOT REMOVE Called by outside packages:55 # sas.sasgui.perspectives.invariant.invariant_state56 # sas.sasgui.perspectives.fitting.pagestate57 def get_content(location, node):58 """59 Get the first instance of the content of a xpath location.60 61 :param location: xpath location62 :param node: node to start at63 64 :return: Element, or None65 """66 nodes = node.xpath(location,67 namespaces={'ns': CANSAS_NS.get("1.0").get("ns")})68 if len(nodes) > 0:69 return nodes[0]70 else:71 return None72 73 # DO NOT REMOVE Called by outside packages:74 # sas.sasgui.perspectives.fitting.pagestate75 def write_node(doc, parent, name, value, attr=None):76 """77 :param doc: document DOM78 :param parent: parent node79 :param name: tag of the element80 :param value: value of the child text node81 :param attr: attribute dictionary82 83 :return: True if something was appended, otherwise False84 """85 if attr is None:86 attr = {}87 if value is not None:88 node = doc.createElement(name)89 node.appendChild(doc.createTextNode(str(value)))90 for item in attr:91 node.setAttribute(item, attr[item])92 parent.appendChild(node)93 return True94 return False95 96 51 class Reader(XMLreader): 97 52 """ … … 101 56 The CanSAS reader requires PyXML 0.8.4 or later. 
102 57 """ 103 ## CanSAS version - defaults to version 1.058 ## CanSAS version - defaults to version 1.0 104 59 cansas_version = "1.0" 105 60 base_ns = "{cansas1d/1.0}" 61 cansas_defaults = None 62 type_name = "canSAS" 63 invalid = True 64 ## Log messages and errors 106 65 logging = None 107 errors = None 108 type_name = "canSAS" 66 errors = set() 67 ## Namespace hierarchy for current xml_file object 68 names = None 69 ns_list = None 70 ## Temporary storage location for loading multiple data sets in a single file 71 current_datainfo = None 72 current_dataset = None 73 current_data1d = None 74 data = None 75 ## List of data1D objects to be sent back to SasView 76 output = None 109 77 ## Wildcards 110 78 type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"] … … 114 82 allow_all = True 115 83 116 def __init__(self): 117 ## List of errors 118 self.errors = set() 84 def reset_state(self): 85 """ 86 Resets the class state to a base case when loading a new data file so previous 87 data files do not appear a second time 88 """ 89 self.current_datainfo = None 90 self.current_dataset = None 91 self.current_data1d = None 92 self.data = [] 93 self.process = Process() 94 self.transspectrum = TransmissionSpectrum() 95 self.aperture = Aperture() 96 self.collimation = Collimation() 97 self.detector = Detector() 98 self.names = [] 99 self.cansas_defaults = {} 100 self.output = [] 101 self.ns_list = None 119 102 self.logging = [] 120 103 self.encoding = None 104 105 def read(self, xml_file, schema_path="", invalid=True): 106 """ 107 Validate and read in an xml_file file in the canSAS format. 
108 109 :param xml_file: A canSAS file path in proper XML format 110 :param schema_path: A file path to an XML schema to validate the xml_file against 111 """ 112 # For every file loaded, reset everything to a base state 113 self.reset_state() 114 self.invalid = invalid 115 # Check that the file exists 116 if os.path.isfile(xml_file): 117 basename, extension = os.path.splitext(os.path.basename(xml_file)) 118 # If the file type is not allowed, return nothing 119 if extension in self.ext or self.allow_all: 120 # Get the file location of 121 self.load_file_and_schema(xml_file, schema_path) 122 self.add_data_set() 123 # Try to load the file, but raise an error if unable to. 124 # Check the file matches the XML schema 125 try: 126 self.is_cansas(extension) 127 self.invalid = False 128 # Get each SASentry from XML file and add it to a list. 129 entry_list = self.xmlroot.xpath( 130 '/ns:SASroot/ns:SASentry', 131 namespaces={'ns': self.cansas_defaults.get("ns")}) 132 self.names.append("SASentry") 133 134 # Get all preprocessing events and encoding 135 self.set_processing_instructions() 136 137 # Parse each <SASentry> item 138 for entry in entry_list: 139 # Create a new DataInfo object for every <SASentry> 140 141 142 # Set the file name and then parse the entry. 
def _parse_entry(self, dom):
    """
    Parse a SASEntry - new recursive method for parsing the dom of
    the CanSAS data format. This will allow multiple data files
    and extra nodes to be read in simultaneously.

    :param dom: dom object with a namespace base of names
    :return: (Data1D, None) when called from outside this module;
        recursive (internal) calls return None implicitly
    """
    frm = inspect.stack()[1]
    # Only reset state when invoked from outside this module; recursive
    # calls made below must keep the partially built state intact.
    if not self._is_call_local(frm):
        self.reset_state()
        self.add_data_set()
        self.names.append("SASentry")
        self.parent_class = "SASentry"
    self._check_for_empty_data()
    self.base_ns = "{0}{1}{2}".format(
        "{", CANSAS_NS.get(self.cansas_version).get("ns"), "}")
    tagname = ''
    tagname_original = ''

    # Go through each child in the parent element
    for node in dom:
        attr = node.attrib
        name = attr.get("name", "")
        # Renamed from 'type': do not shadow the builtin.
        type_attr = attr.get("type", "")
        # Get the element name and set the current names level
        tagname = node.tag.replace(self.base_ns, "")
        tagname_original = tagname
        # Skip this iteration when loading in save state information
        if tagname in ("fitting_plug_in", "pr_inversion", "invariant"):
            continue

        # Get where to store content
        self.names.append(tagname_original)
        self.ns_list = CONSTANTS.iterate_namespace(self.names)
        # If the element is a child element, recurse
        if len(node.getchildren()) > 0:
            self.parent_class = tagname_original
            if tagname == 'SASdata':
                self._initialize_new_data_set()
            # Recursion step to access data within the group
            self._parse_entry(node)
            if tagname == "SASsample":
                self.current_datainfo.sample.name = name
            elif tagname == "beam_size":
                self.current_datainfo.source.beam_size_name = name
            elif tagname == "SAScollimation":
                self.collimation.name = name
            elif tagname == "aperture":
                self.aperture.name = name
                self.aperture.type = type_attr
            self.add_intermediate()
        else:
            data_point, unit = self._get_node_value(node, tagname)

            ## If this is a dataset, store the data appropriately
            if tagname == 'Run':
                self.current_datainfo.run_name[data_point] = name
                self.current_datainfo.run.append(data_point)
            elif tagname == 'Title':
                # NOTE(review): this unguarded branch also matches
                # <Title> inside SASsample, so the SASsample-specific
                # 'Title' branch below is unreachable — confirm intent.
                self.current_datainfo.title = data_point
            elif tagname == 'SASnote':
                self.current_datainfo.notes.append(data_point)

            ## I and Q Data
            elif tagname == 'I':
                self.current_dataset.yaxis("Intensity", unit)
                self.current_dataset.y = np.append(self.current_dataset.y, data_point)
            elif tagname == 'Idev':
                self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
            elif tagname == 'Q':
                self.current_dataset.xaxis("Q", unit)
                self.current_dataset.x = np.append(self.current_dataset.x, data_point)
            elif tagname == 'Qdev':
                self.current_dataset.dx = np.append(self.current_dataset.dx, data_point)
            elif tagname == 'dQw':
                self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point)
            elif tagname == 'dQl':
                self.current_dataset.dxl = np.append(self.current_dataset.dxl, data_point)
            elif tagname == 'Qmean':
                pass
            elif tagname == 'Shadowfactor':
                pass

            ## Sample Information
            elif tagname == 'ID' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.ID = data_point
            elif tagname == 'Title' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.name = data_point
            elif tagname == 'thickness' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.thickness = data_point
                self.current_datainfo.sample.thickness_unit = unit
            elif tagname == 'transmission' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.transmission = data_point
            elif tagname == 'temperature' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.temperature = data_point
                self.current_datainfo.sample.temperature_unit = unit
            elif tagname == 'details' and self.parent_class == 'SASsample':
                self.current_datainfo.sample.details.append(data_point)
            elif tagname == 'x' and self.parent_class == 'position':
                self.current_datainfo.sample.position.x = data_point
                self.current_datainfo.sample.position_unit = unit
            elif tagname == 'y' and self.parent_class == 'position':
                self.current_datainfo.sample.position.y = data_point
                self.current_datainfo.sample.position_unit = unit
            elif tagname == 'z' and self.parent_class == 'position':
                self.current_datainfo.sample.position.z = data_point
                self.current_datainfo.sample.position_unit = unit
            elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASsample' in self.names:
                self.current_datainfo.sample.orientation.x = data_point
                self.current_datainfo.sample.orientation_unit = unit
            elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASsample' in self.names:
                self.current_datainfo.sample.orientation.y = data_point
                self.current_datainfo.sample.orientation_unit = unit
            elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASsample' in self.names:
                self.current_datainfo.sample.orientation.z = data_point
                self.current_datainfo.sample.orientation_unit = unit

            ## Instrumental Information
            elif tagname == 'name' and self.parent_class == 'SASinstrument':
                self.current_datainfo.instrument = data_point
            ## Detector Information
            elif tagname == 'name' and self.parent_class == 'SASdetector':
                self.detector.name = data_point
            elif tagname == 'SDD' and self.parent_class == 'SASdetector':
                self.detector.distance = data_point
                self.detector.distance_unit = unit
            elif tagname == 'slit_length' and self.parent_class == 'SASdetector':
                self.detector.slit_length = data_point
                self.detector.slit_length_unit = unit
            elif tagname == 'x' and self.parent_class == 'offset':
                self.detector.offset.x = data_point
                self.detector.offset_unit = unit
            elif tagname == 'y' and self.parent_class == 'offset':
                self.detector.offset.y = data_point
                self.detector.offset_unit = unit
            elif tagname == 'z' and self.parent_class == 'offset':
                self.detector.offset.z = data_point
                self.detector.offset_unit = unit
            elif tagname == 'x' and self.parent_class == 'beam_center':
                self.detector.beam_center.x = data_point
                self.detector.beam_center_unit = unit
            elif tagname == 'y' and self.parent_class == 'beam_center':
                self.detector.beam_center.y = data_point
                self.detector.beam_center_unit = unit
            elif tagname == 'z' and self.parent_class == 'beam_center':
                self.detector.beam_center.z = data_point
                self.detector.beam_center_unit = unit
            elif tagname == 'x' and self.parent_class == 'pixel_size':
                self.detector.pixel_size.x = data_point
                self.detector.pixel_size_unit = unit
            elif tagname == 'y' and self.parent_class == 'pixel_size':
                self.detector.pixel_size.y = data_point
                self.detector.pixel_size_unit = unit
            elif tagname == 'z' and self.parent_class == 'pixel_size':
                self.detector.pixel_size.z = data_point
                self.detector.pixel_size_unit = unit
            elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASdetector' in self.names:
                self.detector.orientation.x = data_point
                self.detector.orientation_unit = unit
            elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASdetector' in self.names:
                self.detector.orientation.y = data_point
                self.detector.orientation_unit = unit
            elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASdetector' in self.names:
                self.detector.orientation.z = data_point
                self.detector.orientation_unit = unit
            ## Collimation and Aperture
            elif tagname == 'length' and self.parent_class == 'SAScollimation':
                self.collimation.length = data_point
                self.collimation.length_unit = unit
            elif tagname == 'name' and self.parent_class == 'SAScollimation':
                self.collimation.name = data_point
            elif tagname == 'distance' and self.parent_class == 'aperture':
                self.aperture.distance = data_point
                self.aperture.distance_unit = unit
            elif tagname == 'x' and self.parent_class == 'size':
                self.aperture.size.x = data_point
                # NOTE(review): the unit is stored on the collimation,
                # not the aperture; preserved as-is — confirm intent.
                self.collimation.size_unit = unit
            elif tagname == 'y' and self.parent_class == 'size':
                self.aperture.size.y = data_point
                self.collimation.size_unit = unit
            elif tagname == 'z' and self.parent_class == 'size':
                self.aperture.size.z = data_point
                self.collimation.size_unit = unit

            ## Process Information
            elif tagname == 'name' and self.parent_class == 'SASprocess':
                self.process.name = data_point
            elif tagname == 'description' and self.parent_class == 'SASprocess':
                self.process.description = data_point
            elif tagname == 'date' and self.parent_class == 'SASprocess':
                try:
                    self.process.date = datetime.datetime.fromtimestamp(data_point)
                except (TypeError, ValueError):
                    # BUGFIX: was a bare 'except:'; fromtimestamp raises
                    # TypeError/ValueError for non-numeric dates, which is
                    # the only failure this fallback is meant to absorb.
                    self.process.date = data_point
            elif tagname == 'SASprocessnote':
                self.process.notes.append(data_point)
            elif tagname == 'term' and self.parent_class == 'SASprocess':
                unit = attr.get("unit", "")
                dic = {}
                dic["name"] = name
                dic["value"] = data_point
                dic["unit"] = unit
                self.process.term.append(dic)

            ## Transmission Spectrum
            elif tagname == 'T' and self.parent_class == 'Tdata':
                self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point)
                self.transspectrum.transmission_unit = unit
            elif tagname == 'Tdev' and self.parent_class == 'Tdata':
                self.transspectrum.transmission_deviation = np.append(self.transspectrum.transmission_deviation, data_point)
                self.transspectrum.transmission_deviation_unit = unit
            elif tagname == 'Lambda' and self.parent_class == 'Tdata':
                self.transspectrum.wavelength = np.append(self.transspectrum.wavelength, data_point)
                self.transspectrum.wavelength_unit = unit

            ## Source Information
            elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'):
                self.current_datainfo.source.wavelength = data_point
                self.current_datainfo.source.wavelength_unit = unit
            elif tagname == 'wavelength_min' and self.parent_class == 'SASsource':
                self.current_datainfo.source.wavelength_min = data_point
                self.current_datainfo.source.wavelength_min_unit = unit
            elif tagname == 'wavelength_max' and self.parent_class == 'SASsource':
                self.current_datainfo.source.wavelength_max = data_point
                self.current_datainfo.source.wavelength_max_unit = unit
            elif tagname == 'wavelength_spread' and self.parent_class == 'SASsource':
                self.current_datainfo.source.wavelength_spread = data_point
                self.current_datainfo.source.wavelength_spread_unit = unit
            elif tagname == 'x' and self.parent_class == 'beam_size':
                self.current_datainfo.source.beam_size.x = data_point
                self.current_datainfo.source.beam_size_unit = unit
            elif tagname == 'y' and self.parent_class == 'beam_size':
                self.current_datainfo.source.beam_size.y = data_point
                self.current_datainfo.source.beam_size_unit = unit
            elif tagname == 'z' and self.parent_class == 'beam_size':
                # BUGFIX: was "parent_class == 'pixel_size'" assigning to
                # source.data_point.z — Source has no 'data_point'
                # attribute, so the beam-size z component could never be
                # stored (and would raise AttributeError if hit).
                self.current_datainfo.source.beam_size.z = data_point
                self.current_datainfo.source.beam_size_unit = unit
            elif tagname == 'radiation' and self.parent_class == 'SASsource':
                self.current_datainfo.source.radiation = data_point
            elif tagname == 'beam_shape' and self.parent_class == 'SASsource':
                self.current_datainfo.source.beam_shape = data_point

            ## Everything else goes in meta_data
            else:
                new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname)
                self.current_datainfo.meta_data[new_key] = data_point

        # Restore the name stack and parent class for the next sibling.
        self.names.remove(tagname_original)
        length = 0
        if len(self.names) > 1:
            length = len(self.names) - 1
        self.parent_class = self.names[length]
    if not self._is_call_local(frm):
        # Top-level call: finalize the data set and normalize resolution
        # columns so dx and (dxl, dxw) are mutually exclusive.
        self.add_data_set()
        empty = None
        if self.output[0].dx is not None:
            self.output[0].dxl = np.empty(0)
            self.output[0].dxw = np.empty(0)
        else:
            self.output[0].dx = np.empty(0)
        return self.output[0], empty

def _is_call_local(self, frm=""):
    """
    Determine whether the caller's frame belongs to this reader module,
    i.e. whether _parse_entry was entered recursively.

    :param frm: a frame record from inspect.stack(); fetched here if omitted
    :return: True when the calling module is the cansas reader itself
    """
    if frm == "":
        frm = inspect.stack()[1]
    # frm[1] is the caller's file path; normalize it for comparison.
    mod_name = frm[1].replace("\\", "/").replace(".pyc", "")
    mod_name = mod_name.replace(".py", "")
    mod = mod_name.split("sas/")
    mod_name = mod[1]
    if mod_name != "sascalc/dataloader/readers/cansas_reader":
        return False
    return True
""" 455 if frm == "": 456 frm = inspect.stack()[1] 457 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 458 mod_name = mod_name.replace(".py", "") 459 mod = mod_name.split("sas/") 460 mod_name = mod[1] 461 if mod_name != "sascalc/dataloader/readers/cansas_reader": 462 return False 463 return True 121 464 122 465 def is_cansas(self, ext="xml"): … … 134 477 if ext == "svs": 135 478 return True 136 r eturn False479 raise RuntimeError 137 480 138 481 def load_file_and_schema(self, xml_file, schema_path=""): 139 482 """ 140 Loads the file and associates a schema, if a known schemaexists483 Loads the file and associates a schema, if a schema is passed in or if one already exists 141 484 142 485 :param xml_file: The xml file path sent to Reader.read 486 :param schema_path: The path to a schema associated with the xml_file, or find one based on the file 143 487 """ 144 488 base_name = xml_reader.__file__ … … 151 495 152 496 # Generic values for the cansas file based on the version 153 cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0")497 self.cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0") 154 498 if schema_path == "": 155 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \156 (base, cansas_defaults.get("schema")).replace("\\", "/")499 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \ 500 (base, self.cansas_defaults.get("schema")).replace("\\", "/") 157 501 158 502 # Link a schema to the XML file. 159 503 self.set_schema(schema_path) 160 return cansas_defaults 161 162 ## TODO: Test loading invalid CanSAS XML files and see if this works 163 ## TODO: Once works, try adding a warning that the data is invalid 164 def read(self, xml_file, schema_path=""): 165 """ 166 Validate and read in an xml_file file in the canSAS format. 
167 168 :param xml_file: A canSAS file path in proper XML format 169 """ 170 # output - Final list of Data1D objects 171 output = [] 172 # ns - Namespace hierarchy for current xml_file object 173 ns_list = [] 174 175 # Check that the file exists 176 if os.path.isfile(xml_file): 177 basename = os.path.basename(xml_file) 178 _, extension = os.path.splitext(basename) 179 # If the file type is not allowed, return nothing 180 if extension in self.ext or self.allow_all: 181 # Get the file location of 182 cansas_defaults = self.load_file_and_schema(xml_file, schema_path) 183 184 # Try to load the file, but raise an error if unable to. 185 # Check the file matches the XML schema 186 try: 187 if self.is_cansas(extension): 188 # Get each SASentry from XML file and add it to a list. 189 entry_list = self.xmlroot.xpath( 190 '/ns:SASroot/ns:SASentry', 191 namespaces={'ns': cansas_defaults.get("ns")}) 192 ns_list.append("SASentry") 193 194 # If multiple files, modify the name for each is unique 195 increment = 0 196 # Parse each SASentry item 197 for entry in entry_list: 198 # Define a new Data1D object with zeroes for 199 # x_vals and y_vals 200 data1d = Data1D(numpy.empty(0), numpy.empty(0), 201 numpy.empty(0), numpy.empty(0)) 202 data1d.dxl = numpy.empty(0) 203 data1d.dxw = numpy.empty(0) 204 205 # If more than one SASentry, increment each in order 206 name = basename 207 if len(entry_list) - 1 > 0: 208 name += "_{0}".format(increment) 209 increment += 1 210 211 # Set the Data1D name and then parse the entry. 
def add_data_set(self):
    """
    Adds the current dataset to the list of outputs after performing
    final processing on the data, then resets state ready for the next
    data set.
    """
    if self.current_datainfo and self.current_dataset:
        self._final_cleanup()
    self.data = []
    self.current_datainfo = DataInfo()

def _initialize_new_data_set(self, parent_list=None):
    """
    A private class method to generate a new 1D data object.
    Outside methods should call add_data_set() to be sure any existing
    data is stored properly.

    :param parent_list: List of names of parent elements
    """
    if parent_list is None:
        parent_list = []
    # 0-d placeholder values; the dummy leading element is stripped again
    # by np.delete(..., [0]) in _final_cleanup.
    x = np.array(0)
    y = np.array(0)
    self.current_dataset = plottable_1D(x, y)

def add_intermediate(self):
    """
    This method stores any intermediate objects within the final data set
    after fully reading the set, then resets the corresponding working
    object for the next occurrence.
    """
    if self.parent_class == 'SASprocess':
        self.current_datainfo.process.append(self.process)
        self.process = Process()
    elif self.parent_class == 'SASdetector':
        self.current_datainfo.detector.append(self.detector)
        self.detector = Detector()
    elif self.parent_class == 'SAStransmission_spectrum':
        self.current_datainfo.trans_spectrum.append(self.transspectrum)
        self.transspectrum = TransmissionSpectrum()
    elif self.parent_class == 'SAScollimation':
        self.current_datainfo.collimation.append(self.collimation)
        self.collimation = Collimation()
    elif self.parent_class == 'aperture':
        self.collimation.aperture.append(self.aperture)
        self.aperture = Aperture()
    elif self.parent_class == 'SASdata':
        self._check_for_empty_resolution()
        self.data.append(self.current_dataset)

def _final_cleanup(self):
    """
    Final cleanup of the Data1D object to be sure it has all the
    appropriate information needed for perspectives.
    """
    ## Append errors to dataset and reset class errors
    self.current_datainfo.errors = set()
    for error in self.errors:
        self.current_datainfo.errors.add(error)
    self.errors.clear()

    ## Combine all plottables with datainfo and append each to output
    ## Type cast data arrays to float64 and find min/max as appropriate
    for dataset in self.data:
        if dataset.x is not None:
            # Strip the placeholder element added by _initialize_new_data_set.
            dataset.x = np.delete(dataset.x, [0])
            dataset.x = dataset.x.astype(np.float64)
            # BUGFIX: np.min/np.max raise ValueError on empty arrays;
            # guard so a data-less <SASdata> block cannot abort the load.
            if dataset.x.size > 0:
                dataset.xmin = np.min(dataset.x)
                dataset.xmax = np.max(dataset.x)
        if dataset.y is not None:
            dataset.y = np.delete(dataset.y, [0])
            dataset.y = dataset.y.astype(np.float64)
            if dataset.y.size > 0:
                dataset.ymin = np.min(dataset.y)
                dataset.ymax = np.max(dataset.y)
        if dataset.dx is not None:
            dataset.dx = np.delete(dataset.dx, [0])
            dataset.dx = dataset.dx.astype(np.float64)
        if dataset.dxl is not None:
            dataset.dxl = np.delete(dataset.dxl, [0])
            dataset.dxl = dataset.dxl.astype(np.float64)
        if dataset.dxw is not None:
            dataset.dxw = np.delete(dataset.dxw, [0])
            dataset.dxw = dataset.dxw.astype(np.float64)
        if dataset.dy is not None:
            dataset.dy = np.delete(dataset.dy, [0])
            dataset.dy = dataset.dy.astype(np.float64)
        # BUGFIX: the original called np.trim_zeros(dataset.x/y/dy) here
        # and discarded the results — trim_zeros is not in-place, so the
        # calls were no-ops; they have been removed.
        final_dataset = combine_data(dataset, self.current_datainfo)
        self.output.append(final_dataset)
284 605 285 606 :param dictionary: A dictionary with any number of entries … … 294 615 return name 295 616 296 def _unit_conversion(self, node, new_current_level, data1d, \ 297 tagname, node_value): 617 def _get_node_value(self, node, tagname): 618 """ 619 Get the value of a node and any applicable units 620 621 :param node: The XML node to get the value of 622 :param tagname: The tagname of the node 623 """ 624 #Get the text from the node and convert all whitespace to spaces 625 units = '' 626 node_value = node.text 627 if node_value is not None: 628 node_value = ' '.join(node_value.split()) 629 else: 630 node_value = "" 631 632 # If the value is a float, compile with units. 633 if self.ns_list.ns_datatype == "float": 634 # If an empty value is given, set as zero. 635 if node_value is None or node_value.isspace() \ 636 or node_value.lower() == "nan": 637 node_value = "0.0" 638 #Convert the value to the base units 639 node_value, units = self._unit_conversion(node, tagname, node_value) 640 641 # If the value is a timestamp, convert to a datetime object 642 elif self.ns_list.ns_datatype == "timestamp": 643 if node_value is None or node_value.isspace(): 644 pass 645 else: 646 try: 647 node_value = \ 648 datetime.datetime.fromtimestamp(node_value) 649 except ValueError: 650 node_value = None 651 return node_value, units 652 653 def _unit_conversion(self, node, tagname, node_value): 298 654 """ 299 655 A unit converter method used to convert the data included in the file 300 656 to the default units listed in data_info 301 657 302 :param new_current_level: cansas_constants level as returned by 303 iterate_namespace 304 :param attr: The attributes of the node 305 :param data1d: Where the values will be saved 658 :param node: XML node 659 :param tagname: name of the node 306 660 :param node_value: The value of the current dom node 307 661 """ … … 310 664 err_msg = None 311 665 default_unit = None 312 if 'unit' in attr and new_current_level.get('unit') is not None: 666 if 
not isinstance(node_value, float): 667 node_value = float(node_value) 668 if 'unit' in attr and attr.get('unit') is not None: 313 669 try: 314 670 local_unit = attr['unit'] 315 if isinstance(node_value, float) is False: 316 exec("node_value = float({0})".format(node_value)) 317 unitname = new_current_level.get("unit") 318 exec "default_unit = data1d.{0}".format(unitname) 319 if local_unit is not None and default_unit is not None and \ 320 local_unit.lower() != default_unit.lower() \ 671 unitname = self.ns_list.current_level.get("unit", "") 672 if "SASdetector" in self.names: 673 save_in = "detector" 674 elif "aperture" in self.names: 675 save_in = "aperture" 676 elif "SAScollimation" in self.names: 677 save_in = "collimation" 678 elif "SAStransmission_spectrum" in self.names: 679 save_in = "transspectrum" 680 elif "SASdata" in self.names: 681 x = np.zeros(1) 682 y = np.zeros(1) 683 self.current_data1d = Data1D(x, y) 684 save_in = "current_data1d" 685 elif "SASsource" in self.names: 686 save_in = "current_datainfo.source" 687 elif "SASsample" in self.names: 688 save_in = "current_datainfo.sample" 689 elif "SASprocess" in self.names: 690 save_in = "process" 691 else: 692 save_in = "current_datainfo" 693 exec "default_unit = self.{0}.{1}".format(save_in, unitname) 694 if local_unit and default_unit and local_unit.lower() != default_unit.lower() \ 321 695 and local_unit.lower() != "none": 322 696 if HAS_CONVERTER == True: … … 345 719 if err_msg: 346 720 self.errors.add(err_msg) 347 node_value = "float({0})".format(node_value)348 721 return node_value, value_unit 349 722 350 def _check_for_empty_data(self , data1d):723 def _check_for_empty_data(self): 351 724 """ 352 725 Creates an empty data set if no data is passed to the reader … … 354 727 :param data1d: presumably a Data1D object 355 728 """ 356 if data1d == None: 357 self.errors = set() 358 x_vals = numpy.empty(0) 359 y_vals = numpy.empty(0) 360 dx_vals = numpy.empty(0) 361 dy_vals = numpy.empty(0) 362 dxl = 
numpy.empty(0) 363 dxw = numpy.empty(0) 364 data1d = Data1D(x_vals, y_vals, dx_vals, dy_vals) 365 data1d.dxl = dxl 366 data1d.dxw = dxw 367 return data1d 368 369 def _handle_special_cases(self, tagname, data1d, children): 370 """ 371 Handle cases where the data type in Data1D is a dictionary or list 372 373 :param tagname: XML tagname in use 374 :param data1d: The original Data1D object 375 :param children: Child nodes of node 376 :param node: existing node with tag name 'tagname' 377 """ 378 if tagname == "SASdetector": 379 data1d = Detector() 380 elif tagname == "SAScollimation": 381 data1d = Collimation() 382 elif tagname == "SAStransmission_spectrum": 383 data1d = TransmissionSpectrum() 384 elif tagname == "SASprocess": 385 data1d = Process() 386 for child in children: 387 if child.tag.replace(self.base_ns, "") == "term": 388 term_attr = {} 389 for attr in child.keys(): 390 term_attr[attr] = \ 391 ' '.join(child.get(attr).split()) 392 if child.text is not None: 393 term_attr['value'] = \ 394 ' '.join(child.text.split()) 395 data1d.term.append(term_attr) 396 elif tagname == "aperture": 397 data1d = Aperture() 398 if tagname == "Idata" and children is not None: 399 data1d = self._check_for_empty_resolution(data1d, children) 400 return data1d 401 402 def _check_for_empty_resolution(self, data1d, children): 729 if self.current_dataset == None: 730 x_vals = np.empty(0) 731 y_vals = np.empty(0) 732 dx_vals = np.empty(0) 733 dy_vals = np.empty(0) 734 dxl = np.empty(0) 735 dxw = np.empty(0) 736 self.current_dataset = plottable_1D(x_vals, y_vals, dx_vals, dy_vals) 737 self.current_dataset.dxl = dxl 738 self.current_dataset.dxw = dxw 739 740 def _check_for_empty_resolution(self): 403 741 """ 404 742 A method to check all resolution data sets are the same size as I and Q … … 408 746 dq_exists = False 409 747 di_exists = False 410 for child in children: 411 tag = child.tag.replace(self.base_ns, "") 412 if tag == "dQl": 413 dql_exists = True 414 if tag == "dQw": 415 
dqw_exists = True 416 if tag == "Qdev": 417 dq_exists = True 418 if tag == "Idev": 419 di_exists = True 420 if dqw_exists and dql_exists == False: 421 data1d.dxl = numpy.append(data1d.dxl, 0.0) 422 elif dql_exists and dqw_exists == False: 423 data1d.dxw = numpy.append(data1d.dxw, 0.0) 424 elif dql_exists == False and dqw_exists == False \ 425 and dq_exists == False: 426 data1d.dx = numpy.append(data1d.dx, 0.0) 427 if di_exists == False: 428 data1d.dy = numpy.append(data1d.dy, 0.0) 429 return data1d 430 431 def _restore_original_case(self, 432 tagname_original, 433 tagname, 434 save_data1d, 435 data1d): 436 """ 437 Save the special case data to the appropriate location and restore 438 the original Data1D object 439 440 :param tagname_original: Unmodified tagname for the node 441 :param tagname: modified tagname for the node 442 :param save_data1d: The original Data1D object 443 :param data1d: If a special case was handled, an object of that type 444 """ 445 if tagname_original == "SASdetector": 446 save_data1d.detector.append(data1d) 447 elif tagname_original == "SAScollimation": 448 save_data1d.collimation.append(data1d) 449 elif tagname == "SAStransmission_spectrum": 450 save_data1d.trans_spectrum.append(data1d) 451 elif tagname_original == "SASprocess": 452 save_data1d.process.append(data1d) 453 elif tagname_original == "aperture": 454 save_data1d.aperture.append(data1d) 455 else: 456 save_data1d = data1d 457 return save_data1d 458 459 def _handle_attributes(self, node, data1d, cs_values, tagname): 460 """ 461 Process all of the attributes for a node 462 """ 463 attr = node.attrib 464 if attr is not None: 465 for key in node.keys(): 466 try: 467 node_value, unit = self._get_node_value(node, cs_values, \ 468 data1d, tagname) 469 cansas_attrib = \ 470 cs_values.current_level.get("attributes").get(key) 471 attrib_variable = cansas_attrib.get("variable") 472 if key == 'unit' and unit != '': 473 attrib_value = unit 474 else: 475 attrib_value = node.attrib[key] 476 
store_attr = attrib_variable.format("data1d", 477 attrib_value, 478 key, 479 node_value) 480 exec store_attr 481 except AttributeError: 482 pass 483 return data1d 484 485 def _get_node_value(self, node, cs_values, data1d, tagname): 486 """ 487 Get the value of a node and any applicable units 488 489 :param node: The XML node to get the value of 490 :param cs_values: A CansasConstants.CurrentLevel object 491 :param attr: The node attributes 492 :param dataid: The working object to be modified 493 :param tagname: The tagname of the node 494 """ 495 #Get the text from the node and convert all whitespace to spaces 496 units = '' 497 node_value = node.text 498 if node_value == "": 499 node_value = None 500 if node_value is not None: 501 node_value = ' '.join(node_value.split()) 502 503 # If the value is a float, compile with units. 504 if cs_values.ns_datatype == "float": 505 # If an empty value is given, set as zero. 506 if node_value is None or node_value.isspace() \ 507 or node_value.lower() == "nan": 508 node_value = "0.0" 509 #Convert the value to the base units 510 node_value, units = self._unit_conversion(node, \ 511 cs_values.current_level, data1d, tagname, node_value) 512 513 # If the value is a timestamp, convert to a datetime object 514 elif cs_values.ns_datatype == "timestamp": 515 if node_value is None or node_value.isspace(): 516 pass 517 else: 518 try: 519 node_value = \ 520 datetime.datetime.fromtimestamp(node_value) 521 except ValueError: 522 node_value = None 523 return node_value, units 524 525 def _parse_entry(self, dom, names=None, data1d=None, extras=None): 526 """ 527 Parse a SASEntry - new recursive method for parsing the dom of 528 the CanSAS data format. This will allow multiple data files 529 and extra nodes to be read in simultaneously. 
    def _parse_entry(self, dom, names=None, data1d=None, extras=None):
        """
        Parse a SASEntry - new recursive method for parsing the dom of
        the CanSAS data format. This will allow multiple data files
        and extra nodes to be read in simultaneously.

        :param dom: dom object with a namespace base of names
        :param names: A list of element names that lead up to the dom object
        :param data1d: The data1d object that will be modified
        :param extras: Any values that should go into meta_data when data1d
            is not a Data1D object
        :return: (data1d, extras) tuple with all values read so far
        """

        if extras is None:
            extras = []
        if names is None or names == []:
            names = ["SASentry"]

        data1d = self._check_for_empty_data(data1d)

        # Namespace prefix in Clark notation, e.g. "{http://...}".
        self.base_ns = "{0}{1}{2}".format("{", \
                        CANSAS_NS.get(self.cansas_version).get("ns"), "}")
        tagname = ''
        tagname_original = ''

        # Go through each child in the parent element
        for node in dom:
            try:
                # Get the element name and set the current names level
                tagname = node.tag.replace(self.base_ns, "")
                tagname_original = tagname
                # Skip analysis-state nodes; they are handled elsewhere.
                if tagname == "fitting_plug_in" or tagname == "pr_inversion" or\
                    tagname == "invariant":
                    continue
                names.append(tagname)
                children = node.getchildren()
                if len(children) == 0:
                    children = None
                # Keep a reference so the pre-recursion object can be
                # restored in the finally block below.
                save_data1d = data1d

                # Look for special cases
                data1d = self._handle_special_cases(tagname, data1d, children)

                # Get where to store content
                cs_values = CONSTANTS.iterate_namespace(names)
                # If the element is a child element, recurse
                if children is not None:
                    # Returned value is new Data1D object with all previous
                    # and new values in it.
                    data1d, extras = self._parse_entry(node,
                                                       names, data1d, extras)

                # Get the information from the node
                node_value, _ = self._get_node_value(node, cs_values, \
                                                     data1d, tagname)

                # If appending to a dictionary (meta_data | run_name)
                # make sure the key is unique
                if cs_values.ns_variable == "{0}.meta_data[\"{2}\"] = \"{1}\"":
                    # If we are within a Process, Detector, Collimation or
                    # Aperture instance, pull out old data1d
                    tagname = self._create_unique_key(data1d.meta_data, \
                                                      tagname, 0)
                    if isinstance(data1d, Data1D) == False:
                        store_me = cs_values.ns_variable.format("data1d", \
                                                    node_value, tagname)
                        extras.append(store_me)
                        cs_values.ns_variable = None
                if cs_values.ns_variable == "{0}.run_name[\"{2}\"] = \"{1}\"":
                    tagname = self._create_unique_key(data1d.run_name, \
                                                      tagname, 0)

                # Check for Data1D object and any extra commands to save.
                # HACK: assignments are built as strings and exec'd (Python 2
                # statement form). Safe only because the command templates
                # come from CansasConstants, never from the file being read.
                if isinstance(data1d, Data1D):
                    for item in extras:
                        exec item
                # Don't bother saving empty information unless it is a float
                if cs_values.ns_variable is not None and \
                            node_value is not None and \
                            node_value.isspace() == False:
                    # Format a string and then execute it.
                    store_me = cs_values.ns_variable.format("data1d", \
                                                node_value, tagname)
                    exec store_me
                # Get attributes and process them
                data1d = self._handle_attributes(node, data1d, cs_values, \
                                                 tagname)

            except TypeError:
                pass
            except Exception as excep:
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                print(excep, exc_type, fname, exc_tb.tb_lineno, \
                      tagname, exc_obj)
            finally:
                # Save special cases in original data1d object
                # then restore the data1d
                save_data1d = self._restore_original_case(tagname_original, \
                                                tagname, save_data1d, data1d)
                if tagname_original == "fitting_plug_in" or \
                    tagname_original == "invariant" or \
                    tagname_original == "pr_inversion":
                    pass
                else:
                    data1d = save_data1d
                # Remove tagname from names to restore original base
                names.remove(tagname_original)
        return data1d, extras

    def _get_pi_string(self):
        """
        Creates the processing instructions header for writing to file
        """
        pis = self.return_processing_instructions()
        if len(pis) > 0:
            pi_tree = self.create_tree(pis[0])
            i = 1
            # NOTE(review): range stops at len(pis) - 1, so the final
            # processing instruction is never appended -- confirm intended.
            for i in range(1, len(pis) - 1):
                pi_tree = self.append(pis[i], pi_tree)
            pi_string = self.to_string(pi_tree)
        else:
            pi_string = ""
        return pi_string
667 attrib=attrib, nsmap=nsmap) 668 return main_node 669 670 def _write_run_names(self, datainfo, entry_node): 671 """ 672 Writes the run names to the XML file 673 674 :param datainfo: The Data1D object the information is coming from 675 :param entry_node: lxml node ElementTree object to be appended to 676 """ 677 if datainfo.run == None or datainfo.run == []: 678 datainfo.run.append(RUN_NAME_DEFAULT) 679 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 680 for item in datainfo.run: 681 runname = {} 682 if item in datainfo.run_name and \ 683 len(str(datainfo.run_name[item])) > 1: 684 runname = {'name': datainfo.run_name[item]} 685 self.write_node(entry_node, "Run", item, runname) 686 687 def _write_data(self, datainfo, entry_node): 688 """ 689 Writes the I and Q data to the XML file 690 691 :param datainfo: The Data1D object the information is coming from 692 :param entry_node: lxml node ElementTree object to be appended to 693 """ 694 node = self.create_element("SASdata") 695 self.append(node, entry_node) 696 697 for i in range(len(datainfo.x)): 698 point = self.create_element("Idata") 699 node.append(point) 700 self.write_node(point, "Q", datainfo.x[i], 701 {'unit': datainfo.x_unit}) 702 if len(datainfo.y) >= i: 703 self.write_node(point, "I", datainfo.y[i], 704 {'unit': datainfo.y_unit}) 705 if datainfo.dy != None and len(datainfo.dy) > i: 706 self.write_node(point, "Idev", datainfo.dy[i], 707 {'unit': datainfo.y_unit}) 708 if datainfo.dx != None and len(datainfo.dx) > i: 709 self.write_node(point, "Qdev", datainfo.dx[i], 710 {'unit': datainfo.x_unit}) 711 if datainfo.dxw != None and len(datainfo.dxw) > i: 712 self.write_node(point, "dQw", datainfo.dxw[i], 713 {'unit': datainfo.x_unit}) 714 if datainfo.dxl != None and len(datainfo.dxl) > i: 715 self.write_node(point, "dQl", datainfo.dxl[i], 716 {'unit': datainfo.x_unit}) 717 718 def _write_trans_spectrum(self, datainfo, entry_node): 719 """ 720 Writes the transmission spectrum data to the XML file 721 722 
:param datainfo: The Data1D object the information is coming from 723 :param entry_node: lxml node ElementTree object to be appended to 724 """ 725 for i in range(len(datainfo.trans_spectrum)): 726 spectrum = datainfo.trans_spectrum[i] 727 node = self.create_element("SAStransmission_spectrum", 728 {"name" : spectrum.name}) 729 self.append(node, entry_node) 730 if isinstance(spectrum.timestamp, datetime.datetime): 731 node.setAttribute("timestamp", spectrum.timestamp) 732 for i in range(len(spectrum.wavelength)): 733 point = self.create_element("Tdata") 734 node.append(point) 735 self.write_node(point, "Lambda", spectrum.wavelength[i], 736 {'unit': spectrum.wavelength_unit}) 737 self.write_node(point, "T", spectrum.transmission[i], 738 {'unit': spectrum.transmission_unit}) 739 if spectrum.transmission_deviation != None \ 740 and len(spectrum.transmission_deviation) >= i: 741 self.write_node(point, "Tdev", 742 spectrum.transmission_deviation[i], 743 {'unit': 744 spectrum.transmission_deviation_unit}) 745 746 def _write_sample_info(self, datainfo, entry_node): 747 """ 748 Writes the sample information to the XML file 749 750 :param datainfo: The Data1D object the information is coming from 751 :param entry_node: lxml node ElementTree object to be appended to 752 """ 753 sample = self.create_element("SASsample") 754 if datainfo.sample.name is not None: 755 self.write_attribute(sample, "name", 756 str(datainfo.sample.name)) 757 self.append(sample, entry_node) 758 self.write_node(sample, "ID", str(datainfo.sample.ID)) 759 self.write_node(sample, "thickness", datainfo.sample.thickness, 760 {"unit": datainfo.sample.thickness_unit}) 761 self.write_node(sample, "transmission", datainfo.sample.transmission) 762 self.write_node(sample, "temperature", datainfo.sample.temperature, 763 {"unit": datainfo.sample.temperature_unit}) 764 765 pos = self.create_element("position") 766 written = self.write_node(pos, 767 "x", 768 datainfo.sample.position.x, 769 {"unit": 
datainfo.sample.position_unit}) 770 written = written | self.write_node( \ 771 pos, "y", datainfo.sample.position.y, 772 {"unit": datainfo.sample.position_unit}) 773 written = written | self.write_node( \ 774 pos, "z", datainfo.sample.position.z, 775 {"unit": datainfo.sample.position_unit}) 776 if written == True: 777 self.append(pos, sample) 778 779 ori = self.create_element("orientation") 780 written = self.write_node(ori, "roll", 781 datainfo.sample.orientation.x, 782 {"unit": datainfo.sample.orientation_unit}) 783 written = written | self.write_node( \ 784 ori, "pitch", datainfo.sample.orientation.y, 785 {"unit": datainfo.sample.orientation_unit}) 786 written = written | self.write_node( \ 787 ori, "yaw", datainfo.sample.orientation.z, 788 {"unit": datainfo.sample.orientation_unit}) 789 if written == True: 790 self.append(ori, sample) 791 792 for item in datainfo.sample.details: 793 self.write_node(sample, "details", item) 794 795 def _write_instrument(self, datainfo, entry_node): 796 """ 797 Writes the instrumental information to the XML file 798 799 :param datainfo: The Data1D object the information is coming from 800 :param entry_node: lxml node ElementTree object to be appended to 801 """ 802 instr = self.create_element("SASinstrument") 803 self.append(instr, entry_node) 804 self.write_node(instr, "name", datainfo.instrument) 805 return instr 806 807 def _write_source(self, datainfo, instr): 808 """ 809 Writes the source information to the XML file 810 811 :param datainfo: The Data1D object the information is coming from 812 :param instr: instrument node to be appended to 813 """ 814 source = self.create_element("SASsource") 815 if datainfo.source.name is not None: 816 self.write_attribute(source, "name", 817 str(datainfo.source.name)) 818 self.append(source, instr) 819 if datainfo.source.radiation == None or datainfo.source.radiation == '': 820 datainfo.source.radiation = "neutron" 821 self.write_node(source, "radiation", datainfo.source.radiation) 822 823 
size = self.create_element("beam_size") 824 if datainfo.source.beam_size_name is not None: 825 self.write_attribute(size, "name", 826 str(datainfo.source.beam_size_name)) 827 written = self.write_node( \ 828 size, "x", datainfo.source.beam_size.x, 829 {"unit": datainfo.source.beam_size_unit}) 830 written = written | self.write_node( \ 831 size, "y", datainfo.source.beam_size.y, 832 {"unit": datainfo.source.beam_size_unit}) 833 written = written | self.write_node( \ 834 size, "z", datainfo.source.beam_size.z, 835 {"unit": datainfo.source.beam_size_unit}) 836 if written == True: 837 self.append(size, source) 838 839 self.write_node(source, "beam_shape", datainfo.source.beam_shape) 840 self.write_node(source, "wavelength", 841 datainfo.source.wavelength, 842 {"unit": datainfo.source.wavelength_unit}) 843 self.write_node(source, "wavelength_min", 844 datainfo.source.wavelength_min, 845 {"unit": datainfo.source.wavelength_min_unit}) 846 self.write_node(source, "wavelength_max", 847 datainfo.source.wavelength_max, 848 {"unit": datainfo.source.wavelength_max_unit}) 849 self.write_node(source, "wavelength_spread", 850 datainfo.source.wavelength_spread, 851 {"unit": datainfo.source.wavelength_spread_unit}) 852 853 def _write_collimation(self, datainfo, instr): 854 """ 855 Writes the collimation information to the XML file 856 857 :param datainfo: The Data1D object the information is coming from 858 :param instr: lxml node ElementTree object to be appended to 859 """ 860 if datainfo.collimation == [] or datainfo.collimation == None: 861 coll = Collimation() 862 datainfo.collimation.append(coll) 863 for item in datainfo.collimation: 864 coll = self.create_element("SAScollimation") 865 if item.name is not None: 866 self.write_attribute(coll, "name", str(item.name)) 867 self.append(coll, instr) 868 869 self.write_node(coll, "length", item.length, 870 {"unit": item.length_unit}) 871 872 for aperture in item.aperture: 873 apert = self.create_element("aperture") 874 if 
aperture.name is not None: 875 self.write_attribute(apert, "name", str(aperture.name)) 876 if aperture.type is not None: 877 self.write_attribute(apert, "type", str(aperture.type)) 878 self.append(apert, coll) 879 880 size = self.create_element("size") 881 if aperture.size_name is not None: 882 self.write_attribute(size, "name", 883 str(aperture.size_name)) 884 written = self.write_node(size, "x", aperture.size.x, 885 {"unit": aperture.size_unit}) 886 written = written | self.write_node( \ 887 size, "y", aperture.size.y, 888 {"unit": aperture.size_unit}) 889 written = written | self.write_node( \ 890 size, "z", aperture.size.z, 891 {"unit": aperture.size_unit}) 892 if written == True: 893 self.append(size, apert) 894 895 self.write_node(apert, "distance", aperture.distance, 896 {"unit": aperture.distance_unit}) 897 898 def _write_detectors(self, datainfo, instr): 899 """ 900 Writes the detector information to the XML file 901 902 :param datainfo: The Data1D object the information is coming from 903 :param inst: lxml instrument node to be appended to 904 """ 905 if datainfo.detector == None or datainfo.detector == []: 906 det = Detector() 907 det.name = "" 908 datainfo.detector.append(det) 909 910 for item in datainfo.detector: 911 det = self.create_element("SASdetector") 912 written = self.write_node(det, "name", item.name) 913 written = written | self.write_node(det, "SDD", item.distance, 914 {"unit": item.distance_unit}) 915 if written == True: 916 self.append(det, instr) 917 918 off = self.create_element("offset") 919 written = self.write_node(off, "x", item.offset.x, 920 {"unit": item.offset_unit}) 921 written = written | self.write_node(off, "y", item.offset.y, 922 {"unit": item.offset_unit}) 923 written = written | self.write_node(off, "z", item.offset.z, 924 {"unit": item.offset_unit}) 925 if written == True: 926 self.append(off, det) 927 928 ori = self.create_element("orientation") 929 written = self.write_node(ori, "roll", item.orientation.x, 930 {"unit": 
item.orientation_unit}) 931 written = written | self.write_node(ori, "pitch", 932 item.orientation.y, 933 {"unit": item.orientation_unit}) 934 written = written | self.write_node(ori, "yaw", 935 item.orientation.z, 936 {"unit": item.orientation_unit}) 937 if written == True: 938 self.append(ori, det) 939 940 center = self.create_element("beam_center") 941 written = self.write_node(center, "x", item.beam_center.x, 942 {"unit": item.beam_center_unit}) 943 written = written | self.write_node(center, "y", 944 item.beam_center.y, 945 {"unit": item.beam_center_unit}) 946 written = written | self.write_node(center, "z", 947 item.beam_center.z, 948 {"unit": item.beam_center_unit}) 949 if written == True: 950 self.append(center, det) 951 952 pix = self.create_element("pixel_size") 953 written = self.write_node(pix, "x", item.pixel_size.x, 954 {"unit": item.pixel_size_unit}) 955 written = written | self.write_node(pix, "y", item.pixel_size.y, 956 {"unit": item.pixel_size_unit}) 957 written = written | self.write_node(pix, "z", item.pixel_size.z, 958 {"unit": item.pixel_size_unit}) 959 written = written | self.write_node(det, "slit_length", 960 item.slit_length, 961 {"unit": item.slit_length_unit}) 962 if written == True: 963 self.append(pix, det) 964 965 def _write_process_notes(self, datainfo, entry_node): 966 """ 967 Writes the process notes to the XML file 968 969 :param datainfo: The Data1D object the information is coming from 970 :param entry_node: lxml node ElementTree object to be appended to 971 972 """ 973 for item in datainfo.process: 974 node = self.create_element("SASprocess") 975 self.append(node, entry_node) 976 self.write_node(node, "name", item.name) 977 self.write_node(node, "date", item.date) 978 self.write_node(node, "description", item.description) 979 for term in item.term: 980 value = term['value'] 981 del term['value'] 982 self.write_node(node, "term", value, term) 983 for note in item.notes: 984 self.write_node(node, "SASprocessnote", note) 985 if 
len(item.notes) == 0: 986 self.write_node(node, "SASprocessnote", "") 987 988 def _write_notes(self, datainfo, entry_node): 989 """ 990 Writes the notes to the XML file and creates an empty note if none 991 exist 992 993 :param datainfo: The Data1D object the information is coming from 994 :param entry_node: lxml node ElementTree object to be appended to 995 996 """ 997 if len(datainfo.notes) == 0: 998 node = self.create_element("SASnote") 999 self.append(node, entry_node) 1000 else: 1001 for item in datainfo.notes: 1002 node = self.create_element("SASnote") 1003 self.write_text(node, item) 1004 self.append(node, entry_node) 1005 1006 def _check_origin(self, entry_node, doc, frm): 1007 """ 1008 Return the document, and the SASentry node associated with 1009 the data we just wrote. 1010 If the calling function was not the cansas reader, return a minidom 1011 object rather than an lxml object. 1012 1013 :param entry_node: lxml node ElementTree object to be appended to 1014 :param doc: entire xml tree 1015 """ 1016 if not frm: 1017 frm = inspect.stack()[1] 1018 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 1019 mod_name = mod_name.replace(".py", "") 1020 mod = mod_name.split("sas/") 1021 mod_name = mod[1] 1022 if mod_name != "sascalc/dataloader/readers/cansas_reader": 1023 string = self.to_string(doc, pretty_print=False) 1024 doc = parseString(string) 1025 node_name = entry_node.tag 1026 node_list = doc.getElementsByTagName(node_name) 1027 entry_node = node_list.item(0) 1028 return doc, entry_node 748 if self.current_dataset.dxl is not None: 749 dql_exists = True 750 if self.current_dataset.dxw is not None: 751 dqw_exists = True 752 if self.current_dataset.dx is not None: 753 dq_exists = True 754 if self.current_dataset.dy is not None: 755 di_exists = True 756 if dqw_exists and not dql_exists: 757 array_size = self.current_dataset.dxw.size - 1 758 self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size])) 759 elif dql_exists and 
not dqw_exists: 760 array_size = self.current_dataset.dxl.size - 1 761 self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size])) 762 elif not dql_exists and not dqw_exists and not dq_exists: 763 array_size = self.current_dataset.x.size - 1 764 self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size])) 765 if not di_exists: 766 array_size = self.current_dataset.y.size - 1 767 self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size])) 768 769 770 ####### All methods below are for writing CanSAS XML files ####### 771 772 773 def write(self, filename, datainfo): 774 """ 775 Write the content of a Data1D as a CanSAS XML file 776 777 :param filename: name of the file to write 778 :param datainfo: Data1D object 779 """ 780 # Create XML document 781 doc, _ = self._to_xml_doc(datainfo) 782 # Write the file 783 file_ref = open(filename, 'w') 784 if self.encoding == None: 785 self.encoding = "UTF-8" 786 doc.write(file_ref, encoding=self.encoding, 787 pretty_print=True, xml_declaration=True) 788 file_ref.close() 1029 789 1030 790 def _to_xml_doc(self, datainfo): … … 1095 855 return False 1096 856 1097 def write(self, filename, datainfo): 1098 """ 1099 Write the content of a Data1D as a CanSAS XML file 1100 1101 :param filename: name of the file to write 1102 :param datainfo: Data1D object 1103 """ 1104 # Create XML document 1105 doc, _ = self._to_xml_doc(datainfo) 1106 # Write the file 1107 file_ref = open(filename, 'w') 1108 if self.encoding == None: 1109 self.encoding = "UTF-8" 1110 doc.write(file_ref, encoding=self.encoding, 1111 pretty_print=True, xml_declaration=True) 1112 file_ref.close() 857 def _get_pi_string(self): 858 """ 859 Creates the processing instructions header for writing to file 860 """ 861 pis = self.return_processing_instructions() 862 if len(pis) > 0: 863 pi_tree = self.create_tree(pis[0]) 864 i = 1 865 for i in range(1, len(pis) - 1): 866 pi_tree = self.append(pis[i], 
pi_tree) 867 pi_string = self.to_string(pi_tree) 868 else: 869 pi_string = "" 870 return pi_string 871 872 def _create_main_node(self): 873 """ 874 Creates the primary xml header used when writing to file 875 """ 876 xsi = "http://www.w3.org/2001/XMLSchema-instance" 877 version = self.cansas_version 878 n_s = CANSAS_NS.get(version).get("ns") 879 if version == "1.1": 880 url = "http://www.cansas.org/formats/1.1/" 881 else: 882 url = "http://svn.smallangles.net/svn/canSAS/1dwg/trunk/" 883 schema_location = "{0} {1}cansas1d.xsd".format(n_s, url) 884 attrib = {"{" + xsi + "}schemaLocation" : schema_location, 885 "version" : version} 886 nsmap = {'xsi' : xsi, None: n_s} 887 888 main_node = self.create_element("{" + n_s + "}SASroot", 889 attrib=attrib, nsmap=nsmap) 890 return main_node 891 892 def _write_run_names(self, datainfo, entry_node): 893 """ 894 Writes the run names to the XML file 895 896 :param datainfo: The Data1D object the information is coming from 897 :param entry_node: lxml node ElementTree object to be appended to 898 """ 899 if datainfo.run == None or datainfo.run == []: 900 datainfo.run.append(RUN_NAME_DEFAULT) 901 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 902 for item in datainfo.run: 903 runname = {} 904 if item in datainfo.run_name and \ 905 len(str(datainfo.run_name[item])) > 1: 906 runname = {'name': datainfo.run_name[item]} 907 self.write_node(entry_node, "Run", item, runname) 908 909 def _write_data(self, datainfo, entry_node): 910 """ 911 Writes the I and Q data to the XML file 912 913 :param datainfo: The Data1D object the information is coming from 914 :param entry_node: lxml node ElementTree object to be appended to 915 """ 916 node = self.create_element("SASdata") 917 self.append(node, entry_node) 918 919 for i in range(len(datainfo.x)): 920 point = self.create_element("Idata") 921 node.append(point) 922 self.write_node(point, "Q", datainfo.x[i], 923 {'unit': datainfo.x_unit}) 924 if len(datainfo.y) >= i: 925 
self.write_node(point, "I", datainfo.y[i], 926 {'unit': datainfo.y_unit}) 927 if datainfo.dy is not None and len(datainfo.dy) > i: 928 self.write_node(point, "Idev", datainfo.dy[i], 929 {'unit': datainfo.y_unit}) 930 if datainfo.dx is not None and len(datainfo.dx) > i: 931 self.write_node(point, "Qdev", datainfo.dx[i], 932 {'unit': datainfo.x_unit}) 933 if datainfo.dxw is not None and len(datainfo.dxw) > i: 934 self.write_node(point, "dQw", datainfo.dxw[i], 935 {'unit': datainfo.x_unit}) 936 if datainfo.dxl is not None and len(datainfo.dxl) > i: 937 self.write_node(point, "dQl", datainfo.dxl[i], 938 {'unit': datainfo.x_unit}) 939 940 def _write_trans_spectrum(self, datainfo, entry_node): 941 """ 942 Writes the transmission spectrum data to the XML file 943 944 :param datainfo: The Data1D object the information is coming from 945 :param entry_node: lxml node ElementTree object to be appended to 946 """ 947 for i in range(len(datainfo.trans_spectrum)): 948 spectrum = datainfo.trans_spectrum[i] 949 node = self.create_element("SAStransmission_spectrum", 950 {"name" : spectrum.name}) 951 self.append(node, entry_node) 952 if isinstance(spectrum.timestamp, datetime.datetime): 953 node.setAttribute("timestamp", spectrum.timestamp) 954 for i in range(len(spectrum.wavelength)): 955 point = self.create_element("Tdata") 956 node.append(point) 957 self.write_node(point, "Lambda", spectrum.wavelength[i], 958 {'unit': spectrum.wavelength_unit}) 959 self.write_node(point, "T", spectrum.transmission[i], 960 {'unit': spectrum.transmission_unit}) 961 if spectrum.transmission_deviation != None \ 962 and len(spectrum.transmission_deviation) >= i: 963 self.write_node(point, "Tdev", 964 spectrum.transmission_deviation[i], 965 {'unit': 966 spectrum.transmission_deviation_unit}) 967 968 def _write_sample_info(self, datainfo, entry_node): 969 """ 970 Writes the sample information to the XML file 971 972 :param datainfo: The Data1D object the information is coming from 973 :param entry_node: 
lxml node ElementTree object to be appended to 974 """ 975 sample = self.create_element("SASsample") 976 if datainfo.sample.name is not None: 977 self.write_attribute(sample, "name", 978 str(datainfo.sample.name)) 979 self.append(sample, entry_node) 980 self.write_node(sample, "ID", str(datainfo.sample.ID)) 981 self.write_node(sample, "thickness", datainfo.sample.thickness, 982 {"unit": datainfo.sample.thickness_unit}) 983 self.write_node(sample, "transmission", datainfo.sample.transmission) 984 self.write_node(sample, "temperature", datainfo.sample.temperature, 985 {"unit": datainfo.sample.temperature_unit}) 986 987 pos = self.create_element("position") 988 written = self.write_node(pos, 989 "x", 990 datainfo.sample.position.x, 991 {"unit": datainfo.sample.position_unit}) 992 written = written | self.write_node( \ 993 pos, "y", datainfo.sample.position.y, 994 {"unit": datainfo.sample.position_unit}) 995 written = written | self.write_node( \ 996 pos, "z", datainfo.sample.position.z, 997 {"unit": datainfo.sample.position_unit}) 998 if written == True: 999 self.append(pos, sample) 1000 1001 ori = self.create_element("orientation") 1002 written = self.write_node(ori, "roll", 1003 datainfo.sample.orientation.x, 1004 {"unit": datainfo.sample.orientation_unit}) 1005 written = written | self.write_node( \ 1006 ori, "pitch", datainfo.sample.orientation.y, 1007 {"unit": datainfo.sample.orientation_unit}) 1008 written = written | self.write_node( \ 1009 ori, "yaw", datainfo.sample.orientation.z, 1010 {"unit": datainfo.sample.orientation_unit}) 1011 if written == True: 1012 self.append(ori, sample) 1013 1014 for item in datainfo.sample.details: 1015 self.write_node(sample, "details", item) 1016 1017 def _write_instrument(self, datainfo, entry_node): 1018 """ 1019 Writes the instrumental information to the XML file 1020 1021 :param datainfo: The Data1D object the information is coming from 1022 :param entry_node: lxml node ElementTree object to be appended to 1023 """ 1024 
instr = self.create_element("SASinstrument") 1025 self.append(instr, entry_node) 1026 self.write_node(instr, "name", datainfo.instrument) 1027 return instr 1028 1029 def _write_source(self, datainfo, instr): 1030 """ 1031 Writes the source information to the XML file 1032 1033 :param datainfo: The Data1D object the information is coming from 1034 :param instr: instrument node to be appended to 1035 """ 1036 source = self.create_element("SASsource") 1037 if datainfo.source.name is not None: 1038 self.write_attribute(source, "name", 1039 str(datainfo.source.name)) 1040 self.append(source, instr) 1041 if datainfo.source.radiation == None or datainfo.source.radiation == '': 1042 datainfo.source.radiation = "neutron" 1043 self.write_node(source, "radiation", datainfo.source.radiation) 1044 1045 size = self.create_element("beam_size") 1046 if datainfo.source.beam_size_name is not None: 1047 self.write_attribute(size, "name", 1048 str(datainfo.source.beam_size_name)) 1049 written = self.write_node( \ 1050 size, "x", datainfo.source.beam_size.x, 1051 {"unit": datainfo.source.beam_size_unit}) 1052 written = written | self.write_node( \ 1053 size, "y", datainfo.source.beam_size.y, 1054 {"unit": datainfo.source.beam_size_unit}) 1055 written = written | self.write_node( \ 1056 size, "z", datainfo.source.beam_size.z, 1057 {"unit": datainfo.source.beam_size_unit}) 1058 if written == True: 1059 self.append(size, source) 1060 1061 self.write_node(source, "beam_shape", datainfo.source.beam_shape) 1062 self.write_node(source, "wavelength", 1063 datainfo.source.wavelength, 1064 {"unit": datainfo.source.wavelength_unit}) 1065 self.write_node(source, "wavelength_min", 1066 datainfo.source.wavelength_min, 1067 {"unit": datainfo.source.wavelength_min_unit}) 1068 self.write_node(source, "wavelength_max", 1069 datainfo.source.wavelength_max, 1070 {"unit": datainfo.source.wavelength_max_unit}) 1071 self.write_node(source, "wavelength_spread", 1072 datainfo.source.wavelength_spread, 1073 
{"unit": datainfo.source.wavelength_spread_unit}) 1074 1075 def _write_collimation(self, datainfo, instr): 1076 """ 1077 Writes the collimation information to the XML file 1078 1079 :param datainfo: The Data1D object the information is coming from 1080 :param instr: lxml node ElementTree object to be appended to 1081 """ 1082 if datainfo.collimation == [] or datainfo.collimation == None: 1083 coll = Collimation() 1084 datainfo.collimation.append(coll) 1085 for item in datainfo.collimation: 1086 coll = self.create_element("SAScollimation") 1087 if item.name is not None: 1088 self.write_attribute(coll, "name", str(item.name)) 1089 self.append(coll, instr) 1090 1091 self.write_node(coll, "length", item.length, 1092 {"unit": item.length_unit}) 1093 1094 for aperture in item.aperture: 1095 apert = self.create_element("aperture") 1096 if aperture.name is not None: 1097 self.write_attribute(apert, "name", str(aperture.name)) 1098 if aperture.type is not None: 1099 self.write_attribute(apert, "type", str(aperture.type)) 1100 self.append(apert, coll) 1101 1102 size = self.create_element("size") 1103 if aperture.size_name is not None: 1104 self.write_attribute(size, "name", 1105 str(aperture.size_name)) 1106 written = self.write_node(size, "x", aperture.size.x, 1107 {"unit": aperture.size_unit}) 1108 written = written | self.write_node( \ 1109 size, "y", aperture.size.y, 1110 {"unit": aperture.size_unit}) 1111 written = written | self.write_node( \ 1112 size, "z", aperture.size.z, 1113 {"unit": aperture.size_unit}) 1114 if written == True: 1115 self.append(size, apert) 1116 1117 self.write_node(apert, "distance", aperture.distance, 1118 {"unit": aperture.distance_unit}) 1119 1120 def _write_detectors(self, datainfo, instr): 1121 """ 1122 Writes the detector information to the XML file 1123 1124 :param datainfo: The Data1D object the information is coming from 1125 :param inst: lxml instrument node to be appended to 1126 """ 1127 if datainfo.detector == None or 
datainfo.detector == []: 1128 det = Detector() 1129 det.name = "" 1130 datainfo.detector.append(det) 1131 1132 for item in datainfo.detector: 1133 det = self.create_element("SASdetector") 1134 written = self.write_node(det, "name", item.name) 1135 written = written | self.write_node(det, "SDD", item.distance, 1136 {"unit": item.distance_unit}) 1137 if written == True: 1138 self.append(det, instr) 1139 1140 off = self.create_element("offset") 1141 written = self.write_node(off, "x", item.offset.x, 1142 {"unit": item.offset_unit}) 1143 written = written | self.write_node(off, "y", item.offset.y, 1144 {"unit": item.offset_unit}) 1145 written = written | self.write_node(off, "z", item.offset.z, 1146 {"unit": item.offset_unit}) 1147 if written == True: 1148 self.append(off, det) 1149 1150 ori = self.create_element("orientation") 1151 written = self.write_node(ori, "roll", item.orientation.x, 1152 {"unit": item.orientation_unit}) 1153 written = written | self.write_node(ori, "pitch", 1154 item.orientation.y, 1155 {"unit": item.orientation_unit}) 1156 written = written | self.write_node(ori, "yaw", 1157 item.orientation.z, 1158 {"unit": item.orientation_unit}) 1159 if written == True: 1160 self.append(ori, det) 1161 1162 center = self.create_element("beam_center") 1163 written = self.write_node(center, "x", item.beam_center.x, 1164 {"unit": item.beam_center_unit}) 1165 written = written | self.write_node(center, "y", 1166 item.beam_center.y, 1167 {"unit": item.beam_center_unit}) 1168 written = written | self.write_node(center, "z", 1169 item.beam_center.z, 1170 {"unit": item.beam_center_unit}) 1171 if written == True: 1172 self.append(center, det) 1173 1174 pix = self.create_element("pixel_size") 1175 written = self.write_node(pix, "x", item.pixel_size.x, 1176 {"unit": item.pixel_size_unit}) 1177 written = written | self.write_node(pix, "y", item.pixel_size.y, 1178 {"unit": item.pixel_size_unit}) 1179 written = written | self.write_node(pix, "z", item.pixel_size.z, 1180 
{"unit": item.pixel_size_unit}) 1181 written = written | self.write_node(det, "slit_length", 1182 item.slit_length, 1183 {"unit": item.slit_length_unit}) 1184 if written == True: 1185 self.append(pix, det) 1186 1187 def _write_process_notes(self, datainfo, entry_node): 1188 """ 1189 Writes the process notes to the XML file 1190 1191 :param datainfo: The Data1D object the information is coming from 1192 :param entry_node: lxml node ElementTree object to be appended to 1193 1194 """ 1195 for item in datainfo.process: 1196 node = self.create_element("SASprocess") 1197 self.append(node, entry_node) 1198 self.write_node(node, "name", item.name) 1199 self.write_node(node, "date", item.date) 1200 self.write_node(node, "description", item.description) 1201 for term in item.term: 1202 if isinstance(term, list): 1203 value = term['value'] 1204 del term['value'] 1205 elif isinstance(term, dict): 1206 value = term.get("value") 1207 del term['value'] 1208 else: 1209 value = term 1210 self.write_node(node, "term", value, term) 1211 for note in item.notes: 1212 self.write_node(node, "SASprocessnote", note) 1213 if len(item.notes) == 0: 1214 self.write_node(node, "SASprocessnote", "") 1215 1216 def _write_notes(self, datainfo, entry_node): 1217 """ 1218 Writes the notes to the XML file and creates an empty note if none 1219 exist 1220 1221 :param datainfo: The Data1D object the information is coming from 1222 :param entry_node: lxml node ElementTree object to be appended to 1223 1224 """ 1225 if len(datainfo.notes) == 0: 1226 node = self.create_element("SASnote") 1227 self.append(node, entry_node) 1228 else: 1229 for item in datainfo.notes: 1230 node = self.create_element("SASnote") 1231 self.write_text(node, item) 1232 self.append(node, entry_node) 1233 1234 def _check_origin(self, entry_node, doc, frm): 1235 """ 1236 Return the document, and the SASentry node associated with 1237 the data we just wrote. 
1238 If the calling function was not the cansas reader, return a minidom 1239 object rather than an lxml object. 1240 1241 :param entry_node: lxml node ElementTree object to be appended to 1242 :param doc: entire xml tree 1243 """ 1244 if not frm: 1245 frm = inspect.stack()[1] 1246 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 1247 mod_name = mod_name.replace(".py", "") 1248 mod = mod_name.split("sas/") 1249 mod_name = mod[1] 1250 if mod_name != "sascalc/dataloader/readers/cansas_reader": 1251 string = self.to_string(doc, pretty_print=False) 1252 doc = parseString(string) 1253 node_name = entry_node.tag 1254 node_list = doc.getElementsByTagName(node_name) 1255 entry_node = node_list.item(0) 1256 return doc, entry_node 1113 1257 1114 1258 # DO NOT REMOVE - used in saving and loading panel states. … … 1195 1339 if entry is not None and entry.text is not None: 1196 1340 exec "storage.%s = entry.text.strip()" % variable 1341 1342 1343 # DO NOT REMOVE Called by outside packages: 1344 # sas.sasgui.perspectives.invariant.invariant_state 1345 # sas.sasgui.perspectives.fitting.pagestate 1346 def get_content(location, node): 1347 """ 1348 Get the first instance of the content of a xpath location. 
1349 1350 :param location: xpath location 1351 :param node: node to start at 1352 1353 :return: Element, or None 1354 """ 1355 nodes = node.xpath(location, 1356 namespaces={'ns': CANSAS_NS.get("1.0").get("ns")}) 1357 if len(nodes) > 0: 1358 return nodes[0] 1359 else: 1360 return None 1361 1362 # DO NOT REMOVE Called by outside packages: 1363 # sas.sasgui.perspectives.fitting.pagestate 1364 def write_node(doc, parent, name, value, attr=None): 1365 """ 1366 :param doc: document DOM 1367 :param parent: parent node 1368 :param name: tag of the element 1369 :param value: value of the child text node 1370 :param attr: attribute dictionary 1371 1372 :return: True if something was appended, otherwise False 1373 """ 1374 if attr is None: 1375 attr = {} 1376 if value is not None: 1377 node = doc.createElement(name) 1378 node.appendChild(doc.createTextNode(str(value))) 1379 for item in attr: 1380 node.setAttribute(item, attr[item]) 1381 parent.appendChild(node) 1382 return True 1383 return False -
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rd72567e r479799c 62 62 :return: List of Data1D/2D objects and/or a list of errors. 63 63 """ 64 65 64 ## Reinitialize the class when loading a new data file to reset all class variables 66 65 self.reset_class_variables() … … 136 135 ## If this is a dataset, store the data appropriately 137 136 data_set = data[key][:] 137 unit = self._get_unit(value) 138 139 ## I and Q Data 140 if key == u'I': 141 if type(self.current_dataset) is plottable_2D: 142 self.current_dataset.data = data_set 143 self.current_dataset.zaxis("Intensity", unit) 144 else: 145 self.current_dataset.y = data_set.flatten() 146 self.current_dataset.yaxis("Intensity", unit) 147 continue 148 elif key == u'Idev': 149 if type(self.current_dataset) is plottable_2D: 150 self.current_dataset.err_data = data_set.flatten() 151 else: 152 self.current_dataset.dy = data_set.flatten() 153 continue 154 elif key == u'Q': 155 self.current_dataset.xaxis("Q", unit) 156 if type(self.current_dataset) is plottable_2D: 157 self.current_dataset.q = data_set.flatten() 158 else: 159 self.current_dataset.x = data_set.flatten() 160 continue 161 elif key == u'Qy': 162 self.current_dataset.yaxis("Q_y", unit) 163 self.current_dataset.qy_data = data_set.flatten() 164 continue 165 elif key == u'Qydev': 166 self.current_dataset.dqy_data = data_set.flatten() 167 continue 168 elif key == u'Qx': 169 self.current_dataset.xaxis("Q_x", unit) 170 self.current_dataset.qx_data = data_set.flatten() 171 continue 172 elif key == u'Qxdev': 173 self.current_dataset.dqx_data = data_set.flatten() 174 continue 175 elif key == u'Mask': 176 self.current_dataset.mask = data_set.flatten() 177 continue 138 178 139 179 for data_point in data_set: 140 180 ## Top Level Meta Data 141 unit = self._get_unit(value)142 181 if key == u'definition': 143 182 self.current_datainfo.meta_data['reader'] = data_point … … 148 187 elif key == u'SASnote': 149 188 self.current_datainfo.notes.append(data_point) 150 151 ## I and Q Data152 elif key == u'I':153 if 
type(self.current_dataset) is plottable_2D:154 self.current_dataset.data = np.append(self.current_dataset.data, data_point)155 self.current_dataset.zaxis("Intensity", unit)156 else:157 self.current_dataset.y = np.append(self.current_dataset.y, data_point)158 self.current_dataset.yaxis("Intensity", unit)159 elif key == u'Idev':160 if type(self.current_dataset) is plottable_2D:161 self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)162 else:163 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)164 elif key == u'Q':165 self.current_dataset.xaxis("Q", unit)166 if type(self.current_dataset) is plottable_2D:167 self.current_dataset.q = np.append(self.current_dataset.q, data_point)168 else:169 self.current_dataset.x = np.append(self.current_dataset.x, data_point)170 elif key == u'Qy':171 self.current_dataset.yaxis("Q_y", unit)172 self.current_dataset.qy_data = np.append(self.current_dataset.qy_data, data_point)173 elif key == u'Qydev':174 self.current_dataset.dqy_data = np.append(self.current_dataset.dqy_data, data_point)175 elif key == u'Qx':176 self.current_dataset.xaxis("Q_x", unit)177 self.current_dataset.qx_data = np.append(self.current_dataset.qx_data, data_point)178 elif key == u'Qxdev':179 self.current_dataset.dqx_data = np.append(self.current_dataset.dqx_data, data_point)180 elif key == u'Mask':181 self.current_dataset.mask = np.append(self.current_dataset.mask, data_point)182 189 183 190 ## Sample Information … … 296 303 ## Type cast data arrays to float64 and find min/max as appropriate 297 304 for dataset in self.data2d: 298 dataset.data = np.delete(dataset.data, [0])299 305 dataset.data = dataset.data.astype(np.float64) 300 dataset.err_data = np.delete(dataset.err_data, [0])301 306 dataset.err_data = dataset.err_data.astype(np.float64) 302 dataset.mask = np.delete(dataset.mask, [0])303 307 if dataset.qx_data is not None: 304 dataset.qx_data = np.delete(dataset.qx_data, [0])305 308 dataset.xmin = 
np.min(dataset.qx_data) 306 309 dataset.xmax = np.max(dataset.qx_data) 307 310 dataset.qx_data = dataset.qx_data.astype(np.float64) 308 311 if dataset.dqx_data is not None: 309 dataset.dqx_data = np.delete(dataset.dqx_data, [0])310 312 dataset.dqx_data = dataset.dqx_data.astype(np.float64) 311 313 if dataset.qy_data is not None: 312 dataset.qy_data = np.delete(dataset.qy_data, [0])313 314 dataset.ymin = np.min(dataset.qy_data) 314 315 dataset.ymax = np.max(dataset.qy_data) 315 316 dataset.qy_data = dataset.qy_data.astype(np.float64) 316 317 if dataset.dqy_data is not None: 317 dataset.dqy_data = np.delete(dataset.dqy_data, [0])318 318 dataset.dqy_data = dataset.dqy_data.astype(np.float64) 319 319 if dataset.q_data is not None: 320 dataset.q_data = np.delete(dataset.q_data, [0])321 320 dataset.q_data = dataset.q_data.astype(np.float64) 322 321 zeros = np.ones(dataset.data.size, dtype=bool) … … 333 332 except: 334 333 dataset.q_data = None 334 335 if dataset.data.ndim == 2: 336 (n_rows, n_cols) = dataset.data.shape 337 dataset.y_bins = dataset.qy_data[0::n_cols] 338 dataset.x_bins = dataset.qx_data[:n_cols] 339 dataset.data = dataset.data.flatten() 340 335 341 final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo) 336 342 self.output.append(final_dataset) … … 338 344 for dataset in self.data1d: 339 345 if dataset.x is not None: 340 dataset.x = np.delete(dataset.x, [0])341 346 dataset.x = dataset.x.astype(np.float64) 342 347 dataset.xmin = np.min(dataset.x) 343 348 dataset.xmax = np.max(dataset.x) 344 349 if dataset.y is not None: 345 dataset.y = np.delete(dataset.y, [0])346 350 dataset.y = dataset.y.astype(np.float64) 347 351 dataset.ymin = np.min(dataset.y) 348 352 dataset.ymax = np.max(dataset.y) 349 353 if dataset.dx is not None: 350 dataset.dx = np.delete(dataset.dx, [0])351 354 dataset.dx = dataset.dx.astype(np.float64) 352 355 if dataset.dxl is not None: 353 dataset.dxl = np.delete(dataset.dxl, [0])354 356 dataset.dxl = 
dataset.dxl.astype(np.float64) 355 357 if dataset.dxw is not None: 356 dataset.dxw = np.delete(dataset.dxw, [0])357 358 dataset.dxw = dataset.dxw.astype(np.float64) 358 359 if dataset.dy is not None: 359 dataset.dy = np.delete(dataset.dy, [0])360 360 dataset.dy = dataset.dy.astype(np.float64) 361 361 final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
Note: See TracChangeset
for help on using the changeset viewer.