- Timestamp:
- Aug 19, 2016 2:11:40 AM
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- ac370c5
- Parents:
- 3d6ab79 (diff), 1686a333 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas
- Files:
- 18 added
- 1 deleted
- 4 edited
src/sas/sascalc/dataloader/readers/cansas_constants.py
rd398285 r250fec92 27 27 return_me.current_level = self.CANSAS_FORMAT.get("SASentry") 28 28 # Defaults for variable and datatype 29 return_me.ns_variable = "{0}.meta_data[\"{2}\"] = \"{1}\""30 29 return_me.ns_datatype = "content" 31 30 return_me.ns_optional = True … … 38 37 return_me.current_level = \ 39 38 return_me.current_level.get("<any>", "") 40 cl_variable = return_me.current_level.get("variable", "")41 39 cl_datatype = return_me.current_level.get("storeas", "") 42 40 cl_units_optional = \ 43 return_me.current_level.get("units_ required", "")41 return_me.current_level.get("units_optional", "") 44 42 # Where are how to store the variable for the given 45 43 # namespace CANSAS_CONSTANTS tree is hierarchical, so 46 44 # is no value, inherit 47 return_me.ns_variable = cl_variable if cl_variable != "" \48 else return_me.ns_variable49 45 return_me.ns_datatype = cl_datatype if cl_datatype != "" \ 50 46 else return_me.ns_datatype … … 53 49 else return_me.ns_optional 54 50 except AttributeError: 55 return_me.ns_variable = "{0}.meta_data[\"{2}\"] = \"{1}\""56 51 return_me.ns_datatype = "content" 57 52 return_me.ns_optional = True … … 75 70 # The constants below hold information on where to store the CanSAS data 76 71 # when loaded in using sasview 77 META_DATA = "{0}.meta_data[\"{2}\"] = \"{1}\"" 78 ANY = {"variable" : "{0}.meta_data[\"{2}\"] = \'{1}\'", 79 "storeas" : "content", 80 } 81 TITLE = {"variable" : "{0}.title = \"{1}\""} 82 SASNOTE = {"variable" : "{0}.notes.append(\'{1}\')"} 83 SASPROCESS_TERM = {"variable" : None, 84 "attributes" : {"unit" : {"variable" : None}, 85 "name" : {"variable" : None} 86 } 87 } 88 SASPROCESS_SASPROCESSNOTE = {"variable" : None, 89 "children" : {"<any>" : ANY} 90 } 91 SASPROCESS = {"variable" : None, 92 "children" : {"name" : {"variable" : "{0}.name = \'{1}\'"}, 93 "date" : {"variable" : "{0}.date = \'{1}\'"}, 94 "description" : 95 {"variable" : "{0}.description = \'{1}\'"}, 72 ANY = {"storeas" : "content"} 73 TITLE = {} 74 SASNOTE = {} 75 SASPROCESS_TERM = {"attributes" : {"unit" : {}, "name" : {}}} 76 SASPROCESS_SASPROCESSNOTE = {"children" : {"<any>" : ANY}} 77 SASPROCESS = {"children" : {"name" : {}, 78 "date" : {}, 79 "description" : {}, 96 80 "term" : SASPROCESS_TERM, 97 81 "SASprocessnote" : SASPROCESS_SASPROCESSNOTE, … … 99 83 }, 100 84 } 101 RUN = {"variable" : "{0}.run.append(\"{1}\")", 102 "attributes" : {"name" : 103 {"variable" : "{0}.run_name[\"{3}\"] = \"{1}\""}} 104 } 105 SASDATA_IDATA_Q = {"variable" : "{0}.x = numpy.append({0}.x, {1})", 106 "unit" : "x_unit", 107 "attributes" : {"unit" : 108 {"variable" : "{0}.xaxis(\"Q\", \'{1}\')", 109 "storeas" : "content" 110 } 111 }, 112 } 113 SASDATA_IDATA_I = {"variable" : "{0}.y = numpy.append({0}.y, {1})", 114 "unit" : "y_unit", 115 "attributes" : {"unit" : 116 {"variable" : "{0}.yaxis(\"Intensity\", \'{1}\')", 117 "storeas" : "content" 118 } 119 }, 120 } 121 SASDATA_IDATA_IDEV = {"variable" : "{0}.dy = numpy.append({0}.dy, {1})", 85 RUN = {"attributes" : {"name" :{}}} 86 SASDATA_IDATA_Q = {"units_optional" : False, 87 "storeas" : "float", 88 "unit" : "x_unit", 89 "attributes" : {"unit" : {"storeas" : "content"}}, 90 } 91 SASDATA_IDATA_I = {"units_optional" : False, 92 "storeas" : "float", 93 "unit" : "y_unit", 94 "attributes" : {"unit" : {"storeas" : "content"}}, 95 } 96 SASDATA_IDATA_IDEV = {"units_optional" : False, 97 "storeas" : "float", 122 98 "unit" : "y_unit", 123 "attributes" : {"unit" : 124 {"variable" : META_DATA, 125 "storeas" : "content" 126 } 127 }, 128 } 129 SASDATA_IDATA_QDEV = 
{"variable" : "{0}.dx = numpy.append({0}.dx, {1})", 99 "attributes" : {"unit" : {"storeas" : "content"}}, 100 } 101 SASDATA_IDATA_QDEV = {"units_optional" : False, 102 "storeas" : "float", 130 103 "unit" : "x_unit", 131 "attributes" : {"unit" : 132 {"variable" : META_DATA, 133 "storeas" : "content" 134 } 135 }, 136 } 137 SASDATA_IDATA_DQL = {"variable" : "{0}.dxl = numpy.append({0}.dxl, {1})", 104 "attributes" : {"unit" : {"storeas" : "content"}}, 105 } 106 SASDATA_IDATA_DQL = {"units_optional" : False, 107 "storeas" : "float", 138 108 "unit" : "x_unit", 139 "attributes" : {"unit" : 140 {"variable" : META_DATA, 141 "storeas" : "content" 142 } 143 }, 144 } 145 SASDATA_IDATA_DQW = {"variable" : "{0}.dxw = numpy.append({0}.dxw, {1})", 109 "attributes" : {"unit" : {"storeas" : "content"}}, 110 } 111 SASDATA_IDATA_DQW = {"units_optional" : False, 112 "storeas" : "float", 146 113 "unit" : "x_unit", 147 "attributes" : {"unit" : 148 {"variable" : META_DATA, 149 "storeas" : "content" 150 } 151 }, 152 } 153 SASDATA_IDATA_QMEAN = {"storeas" : "content", 154 "unit" : "x_unit", 155 "variable" : META_DATA, 156 "attributes" : {"unit" : {"variable" : META_DATA}}, 114 "attributes" : {"unit" : {"storeas" : "content"}}, 115 } 116 SASDATA_IDATA_QMEAN = {"unit" : "x_unit", 117 "attributes" : {"unit" : {}}, 157 118 } 158 SASDATA_IDATA_SHADOWFACTOR = {"variable" : META_DATA, 159 "storeas" : "content", 160 } 161 SASDATA_IDATA = {"storeas" : "float", 162 "units_optional" : False, 163 "variable" : None, 164 "attributes" : {"name" : {"variable" : META_DATA, 165 "storeas" : "content", 166 }, 167 "timestamp" : {"variable" : META_DATA, 168 "storeas" : "timestamp", 169 } 170 }, 119 SASDATA_IDATA_SHADOWFACTOR = {} 120 SASDATA_IDATA = {"attributes" : {"name" : {},"timestamp" : {"storeas" : "timestamp"}}, 171 121 "children" : {"Q" : SASDATA_IDATA_Q, 172 122 "I" : SASDATA_IDATA_I, … … 180 130 } 181 131 } 182 SASDATA = {"attributes" : {"name" : { "variable" : META_DATA,}},132 SASDATA = {"attributes" : {"name" : {}}, 183 133 "variable" : None, 184 134 "children" : {"Idata" : SASDATA_IDATA, … … 186 136 } 187 137 } 188 SASTRANSSPEC_TDATA_LAMDBA = {" variable" : "{0}.wavelength.append({1})",138 SASTRANSSPEC_TDATA_LAMDBA = {"storeas" : "float", 189 139 "unit" : "wavelength_unit", 190 "attributes" : 191 {"unit" : 192 {"variable" : "{0}.wavelength_unit = \"{1}\"", 193 "storeas" : "content" 194 } 195 } 140 "attributes" : {"unit" : {"storeas" : "content"}} 196 141 } 197 SASTRANSSPEC_TDATA_T = {" variable" : "{0}.transmission.append({1})",142 SASTRANSSPEC_TDATA_T = {"storeas" : "float", 198 143 "unit" : "transmission_unit", 199 "attributes" : 200 {"unit" : 201 {"variable" : "{0}.transmission_unit = \"{1}\"", 202 "storeas" : "content" 203 } 204 } 205 } 206 SASTRANSSPEC_TDATA_TDEV = {"variable" : 207 "{0}.transmission_deviation.append({1})", 144 "attributes" : {"unit" : {"storeas" : "content"}} 145 } 146 SASTRANSSPEC_TDATA_TDEV = {"storeas" : "float", 208 147 "unit" : "transmission_deviation_unit", 209 "attributes" : 210 {"unit" : 211 {"variable" : 212 "{0}.transmission_deviation_unit = \"{1}\"", 213 "storeas" : "content" 214 } 215 } 216 } 217 SASTRANSSPEC_TDATA = {"storeas" : "float", 218 "variable" : None, 219 "children" : {"Lambda" : SASTRANSSPEC_TDATA_LAMDBA, 148 "attributes" : {"unit" :{"storeas" : "content"}} 149 } 150 SASTRANSSPEC_TDATA = {"children" : {"Lambda" : SASTRANSSPEC_TDATA_LAMDBA, 220 151 "T" : SASTRANSSPEC_TDATA_T, 221 152 "Tdev" : SASTRANSSPEC_TDATA_TDEV, … … 223 154 } 224 155 } 225 SASTRANSSPEC = {"variable" : None, 
226 "children" : {"Tdata" : SASTRANSSPEC_TDATA, 156 SASTRANSSPEC = {"children" : {"Tdata" : SASTRANSSPEC_TDATA, 227 157 "<any>" : ANY, 228 158 }, 229 "attributes" : 230 {"name" : 231 {"variable" : "{0}.name = \"{1}\""}, 232 "timestamp" : 233 {"variable" : "{0}.timestamp = \"{1}\""}, 234 } 159 "attributes" : {"name" :{}, "timestamp" : {},} 235 160 } 236 SASSAMPLE_THICK = {"variable" : "{0}.sample.thickness = {1}", 237 "unit" : "sample.thickness_unit", 238 "storeas" : "float", 239 "attributes" : 240 {"unit" : 241 {"variable" : "{0}.sample.thickness_unit = \"{1}\"", 242 "storeas" : "content" 243 } 244 }, 245 } 246 SASSAMPLE_TRANS = {"variable" : "{0}.sample.transmission = {1}", 247 "storeas" : "float", 248 } 249 SASSAMPLE_TEMP = {"variable" : "{0}.sample.temperature = {1}", 250 "unit" : "sample.temperature_unit", 161 SASSAMPLE_THICK = {"unit" : "thickness_unit", 162 "storeas" : "float", 163 "attributes" : {"unit" :{}}, 164 } 165 SASSAMPLE_TRANS = {"storeas" : "float",} 166 SASSAMPLE_TEMP = {"unit" : "temperature_unit", 251 167 "storeas" : "float", 252 "attributes" : 253 {"unit" : 254 {"variable" : "{0}.sample.temperature_unit = \"{1}\"", 255 "storeas" : "content" 256 } 257 }, 168 "attributes" :{"unit" :{}}, 258 169 } 259 SASSAMPLE_POS_ATTR = {"unit" : {"variable" : 260 "{0}.sample.position_unit = \"{1}\"", 261 "storeas" : "content" 262 } 263 } 264 SASSAMPLE_POS_X = {"variable" : "{0}.sample.position.x = {1}", 265 "unit" : "sample.position_unit", 170 SASSAMPLE_POS_ATTR = {"unit" : {}} 171 SASSAMPLE_POS_X = {"unit" : "position_unit", 266 172 "storeas" : "float", 267 173 "attributes" : SASSAMPLE_POS_ATTR 268 174 } 269 SASSAMPLE_POS_Y = {"variable" : "{0}.sample.position.y = {1}", 270 "unit" : "sample.position_unit", 175 SASSAMPLE_POS_Y = {"unit" : "position_unit", 271 176 "storeas" : "float", 272 177 "attributes" : SASSAMPLE_POS_ATTR 273 178 } 274 SASSAMPLE_POS_Z = {"variable" : "{0}.sample.position.z = {1}", 275 "unit" : "sample.position_unit", 179 SASSAMPLE_POS_Z = {"unit" : "position_unit", 276 180 "storeas" : "float", 277 181 "attributes" : SASSAMPLE_POS_ATTR 278 182 } 279 SASSAMPLE_POS = {"children" : {"variable" : None, 280 "x" : SASSAMPLE_POS_X, 183 SASSAMPLE_POS = {"children" : {"x" : SASSAMPLE_POS_X, 281 184 "y" : SASSAMPLE_POS_Y, 282 185 "z" : SASSAMPLE_POS_Z, 283 186 }, 284 187 } 285 SASSAMPLE_ORIENT_ATTR = {"unit" : 286 {"variable" : 287 "{0}.sample.orientation_unit = \"{1}\"", 288 "storeas" : "content" 289 } 290 } 291 SASSAMPLE_ORIENT_ROLL = {"variable" : "{0}.sample.orientation.x = {1}", 292 "unit" : "sample.orientation_unit", 188 SASSAMPLE_ORIENT_ATTR = {"unit" :{}} 189 SASSAMPLE_ORIENT_ROLL = {"unit" : "orientation_unit", 293 190 "storeas" : "float", 294 191 "attributes" : SASSAMPLE_ORIENT_ATTR 295 192 } 296 SASSAMPLE_ORIENT_PITCH = {"variable" : "{0}.sample.orientation.y = {1}", 297 "unit" : "sample.orientation_unit", 193 SASSAMPLE_ORIENT_PITCH = {"unit" : "orientation_unit", 298 194 "storeas" : "float", 299 195 "attributes" : SASSAMPLE_ORIENT_ATTR 300 196 } 301 SASSAMPLE_ORIENT_YAW = {"variable" : "{0}.sample.orientation.z = {1}", 302 "unit" : "sample.orientation_unit", 197 SASSAMPLE_ORIENT_YAW = {"unit" : "orientation_unit", 303 198 "storeas" : "float", 304 199 "attributes" : SASSAMPLE_ORIENT_ATTR 305 200 } 306 SASSAMPLE_ORIENT = {"variable" : None, 307 "children" : {"roll" : SASSAMPLE_ORIENT_ROLL, 201 SASSAMPLE_ORIENT = {"children" : {"roll" : SASSAMPLE_ORIENT_ROLL, 308 202 "pitch" : SASSAMPLE_ORIENT_PITCH, 309 203 "yaw" : SASSAMPLE_ORIENT_YAW, … … 311 205 } 312 206 SASSAMPLE 
= {"attributes" : 313 {"name" : {"variable" : "{0}.sample.name = \"{1}\""},}, 314 "variable" : None, 315 "children" : {"ID" : {"variable" : "{0}.sample.ID = \"{1}\""}, 207 {"name" : {},}, 208 "children" : {"ID" : {}, 316 209 "thickness" : SASSAMPLE_THICK, 317 210 "transmission" : SASSAMPLE_TRANS, … … 319 212 "position" : SASSAMPLE_POS, 320 213 "orientation" : SASSAMPLE_ORIENT, 321 "details" : 322 {"variable" : 323 "{0}.sample.details.append(\"{1}\")"}, 214 "details" : {}, 324 215 "<any>" : ANY 325 216 }, 326 217 } 327 SASINSTR_SRC_BEAMSIZE_ATTR = {"unit" : 328 "{0}.source.beam_size_unit = \"{1}\"", 329 "storeas" : "content" 330 } 331 SASINSTR_SRC_BEAMSIZE_X = {"variable" : "{0}.source.beam_size.x = {1}", 332 "unit" : "source.beam_size_unit", 218 SASINSTR_SRC_BEAMSIZE_ATTR = {"unit" : ""} 219 SASINSTR_SRC_BEAMSIZE_X = {"unit" : "beam_size_unit", 333 220 "storeas" : "float", 334 221 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 335 222 } 336 SASINSTR_SRC_BEAMSIZE_Y = {"variable" : "{0}.source.beam_size.y = {1}", 337 "unit" : "source.beam_size_unit", 223 SASINSTR_SRC_BEAMSIZE_Y = {"unit" : "beam_size_unit", 338 224 "storeas" : "float", 339 225 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 340 226 } 341 SASINSTR_SRC_BEAMSIZE_Z = {"variable" : "{0}.source.beam_size.z = {1}", 342 "unit" : "source.beam_size_unit", 227 SASINSTR_SRC_BEAMSIZE_Z = {"unit" : "beam_size_unit", 343 228 "storeas" : "float", 344 229 "attributes" : SASINSTR_SRC_BEAMSIZE_ATTR 345 230 } 346 SASINSTR_SRC_BEAMSIZE = {"attributes" : 347 {"name" : {"variable" : 348 "{0}.source.beam_size_name = \"{1}\""}}, 349 "variable" : None, 231 SASINSTR_SRC_BEAMSIZE = {"attributes" : {"name" : {}}, 350 232 "children" : {"x" : SASINSTR_SRC_BEAMSIZE_X, 351 233 "y" : SASINSTR_SRC_BEAMSIZE_Y, … … 353 235 } 354 236 } 355 SASINSTR_SRC_WL = {"variable" : "{0}.source.wavelength = {1}", 356 "unit" : "source.wavelength_unit", 357 "storeas" : "float", 358 "attributes" : 359 {"unit" : 360 {"variable" : "{0}.source.wavelength_unit = \"{1}\"", 361 "storeas" : "content" 362 }, 237 SASINSTR_SRC_WL = {"unit" : "wavelength_unit", 238 "storeas" : "float", 239 "attributes" : {"unit" :{}, 363 240 } 364 241 } 365 SASINSTR_SRC_WL_MIN = {"variable" : "{0}.source.wavelength_min = {1}", 366 "unit" : "source.wavelength_min_unit", 242 SASINSTR_SRC_WL_MIN = {"unit" : "wavelength_min_unit", 367 243 "storeas" : "float", 368 "attributes" : 369 {"unit" : 370 {"variable" : 371 "{0}.source.wavelength_min_unit = \"{1}\"", 372 "storeas" : "content" 373 }, 374 } 244 "attributes" : {"unit" :{"storeas" : "content"},} 375 245 } 376 SASINSTR_SRC_WL_MAX = {"variable" : "{0}.source.wavelength_max = {1}", 377 "unit" : "source.wavelength_max_unit", 246 SASINSTR_SRC_WL_MAX = {"unit" : "wavelength_max_unit", 378 247 "storeas" : "float", 379 "attributes" : 380 {"unit" : 381 {"variable" : 382 "{0}.source.wavelength_max_unit = \"{1}\"", 383 "storeas" : "content" 384 }, 385 } 248 "attributes" : {"unit" :{"storeas" : "content"},} 386 249 } 387 SASINSTR_SRC_WL_SPR = {"variable" : "{0}.source.wavelength_spread = {1}", 388 "unit" : "source.wavelength_spread_unit", 250 SASINSTR_SRC_WL_SPR = {"unit" : "wavelength_spread_unit", 389 251 "storeas" : "float", 390 "attributes" : 391 {"unit" : 392 {"variable" : 393 "{0}.source.wavelength_spread_unit = \"{1}\"", 394 "storeas" : "content" 395 }, 396 } 252 "attributes" : {"unit" : {"storeas" : "content"},} 397 253 } 398 SASINSTR_SRC = {"attributes" : {"name" : {"variable" : 399 "{0}.source.name = \"{1}\""}}, 400 "variable" : None, 401 "children" : {"radiation" : 
402 {"variable" : 403 "{0}.source.radiation = \"{1}\""}, 254 SASINSTR_SRC = {"attributes" : {"name" : {}}, 255 "children" : {"radiation" : {}, 404 256 "beam_size" : SASINSTR_SRC_BEAMSIZE, 405 "beam_shape" : 406 {"variable" : 407 "{0}.source.beam_shape = \"{1}\""}, 257 "beam_shape" : {}, 408 258 "wavelength" : SASINSTR_SRC_WL, 409 259 "wavelength_min" : SASINSTR_SRC_WL_MIN, … … 412 262 }, 413 263 } 414 SASINSTR_COLL_APER_ATTR = {"unit" : {"variable" : "{0}.size_unit = \"{1}\"", 415 "storeas" : "content" 416 }, 417 } 418 SASINSTR_COLL_APER_X = {"variable" : "{0}.size.x = {1}", 419 "unit" : "size_unit", 264 SASINSTR_COLL_APER_ATTR = {"unit" : {}} 265 SASINSTR_COLL_APER_X = {"unit" : "size_unit", 420 266 "storeas" : "float", 421 267 "attributes" : SASINSTR_COLL_APER_ATTR 422 268 } 423 SASINSTR_COLL_APER_Y = {"variable" : "{0}.size.y = {1}", 424 "unit" : "size_unit", 269 SASINSTR_COLL_APER_Y = {"unit" : "size_unit", 425 270 "storeas" : "float", 426 271 "attributes" : SASINSTR_COLL_APER_ATTR 427 272 } 428 SASINSTR_COLL_APER_Z = {"variable" : "{0}.size.z = {1}", 429 "unit" : "size_unit", 273 SASINSTR_COLL_APER_Z = {"unit" : "size_unit", 430 274 "storeas" : "float", 431 275 "attributes" : SASINSTR_COLL_APER_ATTR 432 276 } 433 SASINSTR_COLL_APER_SIZE = {"attributes" : 434 {"unit" : {"variable" : 435 "{0}.size_unit = \"{1}\""}}, 277 SASINSTR_COLL_APER_SIZE = {"attributes" : {"unit" : {}}, 436 278 "children" : {"storeas" : "float", 437 279 "x" : SASINSTR_COLL_APER_X, … … 441 283 } 442 284 SASINSTR_COLL_APER_DIST = {"storeas" : "float", 443 "attributes" : 444 {"storeas" : "content", 445 "unit" : {"variable" : 446 "{0}.distance_unit = \"{1}\""} 447 }, 448 "variable" : "{0}.distance = {1}", 285 "attributes" : {"unit" : {}}, 449 286 "unit" : "distance_unit", 450 287 } 451 SASINSTR_COLL_APER = {"variable" : None, 452 "attributes" : {"name" : {"variable" : 453 "{0}.name = \"{1}\""}, 454 "type" : {"variable" : 455 "{0}.type = \"{1}\""}, 456 }, 288 SASINSTR_COLL_APER = {"attributes" : {"name" : {}, "type" : {}, }, 457 289 "children" : {"size" : SASINSTR_COLL_APER_SIZE, 458 290 "distance" : SASINSTR_COLL_APER_DIST 459 291 } 460 292 } 461 SASINSTR_COLL = {"attributes" : 462 {"name" : {"variable" : "{0}.name = \"{1}\""}}, 463 "variable" : None, 293 SASINSTR_COLL = {"attributes" : {"name" : {}}, 464 294 "children" : 465 {"length" : 466 {"variable" : "{0}.length = {1}", 467 "unit" : "length_unit", 468 "storeas" : "float", 469 "attributes" : 470 {"storeas" : "content", 471 "unit" : {"variable" : "{0}.length_unit = \"{1}\""} 472 }, 473 }, 474 "aperture" : SASINSTR_COLL_APER, 475 }, 295 {"length" : 296 {"unit" : "length_unit", 297 "storeas" : "float", 298 "attributes" : {"storeas" : "content", "unit" : {}}, 299 }, 300 "aperture" : SASINSTR_COLL_APER, 301 }, 476 302 } 477 SASINSTR_DET_SDD = {" variable" : "{0}.distance = {1}",303 SASINSTR_DET_SDD = {"storeas" : "float", 478 304 "unit" : "distance_unit", 479 "attributes" : 480 {"unit" : 481 {"variable" : "{0}.distance_unit = \"{1}\"", 482 "storeas" : "content" 483 } 484 }, 305 "attributes" : {"unit" :{}}, 485 306 } 486 SASINSTR_DET_OFF_ATTR = {"unit" : {"variable" : "{0}.offset_unit = \"{1}\"", 487 "storeas" : "content" 488 }, 489 } 490 SASINSTR_DET_OFF_X = {"variable" : "{0}.offset.x = {1}", 307 SASINSTR_DET_OFF_ATTR = {"unit" : {"storeas" : "content" }} 308 SASINSTR_DET_OFF_X = {"storeas" : "float", 491 309 "unit" : "offset_unit", 492 310 "attributes" : SASINSTR_DET_OFF_ATTR 493 311 } 494 SASINSTR_DET_OFF_Y = {" variable" : "{0}.offset.y = {1}",312 
SASINSTR_DET_OFF_Y = {"storeas" : "float", 495 313 "unit" : "offset_unit", 496 314 "attributes" : SASINSTR_DET_OFF_ATTR 497 315 } 498 SASINSTR_DET_OFF_Z = {" variable" : "{0}.offset.z = {1}",316 SASINSTR_DET_OFF_Z = {"storeas" : "float", 499 317 "unit" : "offset_unit", 500 318 "attributes" : SASINSTR_DET_OFF_ATTR 501 319 } 502 SASINSTR_DET_OFF = {"variable" : None, 503 "children" : {"x" : SASINSTR_DET_OFF_X, 320 SASINSTR_DET_OFF = {"children" : {"x" : SASINSTR_DET_OFF_X, 504 321 "y" : SASINSTR_DET_OFF_Y, 505 322 "z" : SASINSTR_DET_OFF_Z, 506 323 } 507 324 } 508 SASINSTR_DET_OR_ATTR = {"unit" : "{0}.orientation_unit = \"{1}\"", 509 "storeas" : "content" 510 } 511 SASINSTR_DET_OR_ROLL = {"variable" : "{0}.orientation.x = {1}", 325 SASINSTR_DET_OR_ATTR = {} 326 SASINSTR_DET_OR_ROLL = {"storeas" : "float", 512 327 "unit" : "orientation_unit", 513 328 "attributes" : SASINSTR_DET_OR_ATTR 514 329 } 515 SASINSTR_DET_OR_PITCH = {" variable" : "{0}.orientation.y = {1}",330 SASINSTR_DET_OR_PITCH = {"storeas" : "float", 516 331 "unit" : "orientation_unit", 517 332 "attributes" : SASINSTR_DET_OR_ATTR 518 333 } 519 SASINSTR_DET_OR_YAW = {" variable" : "{0}.orientation.z = {1}",334 SASINSTR_DET_OR_YAW = {"storeas" : "float", 520 335 "unit" : "orientation_unit", 521 336 "attributes" : SASINSTR_DET_OR_ATTR 522 337 } 523 SASINSTR_DET_OR = {"variable" : None, 524 "children" : {"roll" : SASINSTR_DET_OR_ROLL, 338 SASINSTR_DET_OR = {"children" : {"roll" : SASINSTR_DET_OR_ROLL, 525 339 "pitch" : SASINSTR_DET_OR_PITCH, 526 340 "yaw" : SASINSTR_DET_OR_YAW, 527 341 } 528 342 } 529 SASINSTR_DET_BC_X = {" variable" : "{0}.beam_center.x = {1}",343 SASINSTR_DET_BC_X = {"storeas" : "float", 530 344 "unit" : "beam_center_unit", 531 "attributes" : 532 {"unit" : "{0}.beam_center_unit = \"{1}\"", 533 "storeas" : "content" 534 } 535 } 536 SASINSTR_DET_BC_Y = {"variable" : "{0}.beam_center.y = {1}", 345 "attributes" : {"storeas" : "content"} 346 } 347 SASINSTR_DET_BC_Y = {"storeas" : "float", 537 348 "unit" : "beam_center_unit", 538 "attributes" : 539 {"unit" : "{0}.beam_center_unit = \"{1}\"", 540 "storeas" : "content" 541 } 542 } 543 SASINSTR_DET_BC_Z = {"variable" : "{0}.beam_center.z = {1}", 349 "attributes" : {"storeas" : "content"} 350 } 351 SASINSTR_DET_BC_Z = {"storeas" : "float", 544 352 "unit" : "beam_center_unit", 545 "attributes" : 546 {"unit" : "{0}.beam_center_unit = \"{1}\"", 547 "storeas" : "content" 548 } 549 } 550 SASINSTR_DET_BC = {"variable" : None, 551 "children" : {"x" : SASINSTR_DET_BC_X, 353 "attributes" : {"storeas" : "content"} 354 } 355 SASINSTR_DET_BC = {"children" : {"x" : SASINSTR_DET_BC_X, 552 356 "y" : SASINSTR_DET_BC_Y, 553 "z" : SASINSTR_DET_BC_Z, 554 } 555 } 556 SASINSTR_DET_PIXEL_X = {"variable" : "{0}.pixel_size.x = {1}", 357 "z" : SASINSTR_DET_BC_Z,} 358 } 359 SASINSTR_DET_PIXEL_X = {"storeas" : "float", 557 360 "unit" : "pixel_size_unit", 558 "attributes" : 559 {"unit" : "{0}.pixel_size_unit = \"{1}\"", 560 "storeas" : "content" 561 } 562 } 563 SASINSTR_DET_PIXEL_Y = {"variable" : "{0}.pixel_size.y = {1}", 361 "attributes" : {"storeas" : "content" } 362 } 363 SASINSTR_DET_PIXEL_Y = {"storeas" : "float", 564 364 "unit" : "pixel_size_unit", 565 "attributes" : 566 {"unit" : "{0}.pixel_size_unit = \"{1}\"", 567 "storeas" : "content" 568 } 569 } 570 SASINSTR_DET_PIXEL_Z = {"variable" : "{0}.pixel_size.z = {1}", 365 "attributes" : {"storeas" : "content"} 366 } 367 SASINSTR_DET_PIXEL_Z = {"storeas" : "float", 571 368 "unit" : "pixel_size_unit", 572 "attributes" : 573 {"unit" : 
"{0}.pixel_size_unit = \"{1}\"", 574 "storeas" : "content" 575 } 576 } 577 SASINSTR_DET_PIXEL = {"variable" : None, 578 "children" : {"x" : SASINSTR_DET_PIXEL_X, 369 "attributes" : {"storeas" : "content"} 370 } 371 SASINSTR_DET_PIXEL = {"children" : {"x" : SASINSTR_DET_PIXEL_X, 579 372 "y" : SASINSTR_DET_PIXEL_Y, 580 373 "z" : SASINSTR_DET_PIXEL_Z, 581 374 } 582 375 } 583 SASINSTR_DET_SLIT = {" variable" : "{0}.slit_length = {1}",376 SASINSTR_DET_SLIT = {"storeas" : "float", 584 377 "unit" : "slit_length_unit", 585 "attributes" : 586 {"unit" : 587 {"variable" : "{0}.slit_length_unit = \"{1}\"", 588 "storeas" : "content" 589 } 590 } 591 } 592 SASINSTR_DET = {"storeas" : "float", 593 "variable" : None, 594 "attributes" : {"name" : 595 {"storeas" : "content", 596 "variable" : "{0}.name = \"{1}\"", 597 } 598 }, 599 "children" : {"name" : {"storeas" : "content", 600 "variable" : "{0}.name = \"{1}\"", 601 }, 378 "attributes" : {"unit" : {}} 379 } 380 SASINSTR_DET = {"attributes" : {"name" : {"storeas" : "content"}}, 381 "children" : {"name" : {"storeas" : "content"}, 602 382 "SDD" : SASINSTR_DET_SDD, 603 383 "offset" : SASINSTR_DET_OFF, … … 608 388 } 609 389 } 610 SASINSTR = {"variable" : None, 611 "children" : 612 {"variable" : None, 613 "name" : {"variable" : "{0}.instrument = \"{1}\""}, 390 SASINSTR = {"children" : 391 {"name" : {}, 614 392 "SASsource" : SASINSTR_SRC, 615 393 "SAScollimation" : SASINSTR_COLL, … … 619 397 CANSAS_FORMAT = {"SASentry" : 620 398 {"units_optional" : True, 621 "variable" : None,622 399 "storeas" : "content", 623 "attributes" : {"name" : 624 {"variable" : 625 "{0}.run_name[\"{3}\"] = \"{1}\""}}, 400 "attributes" : {"name" : {}}, 626 401 "children" : {"Title" : TITLE, 627 402 "Run" : RUN, … … 644 419 645 420 current_level = '' 646 ns_variable = ''647 421 ns_datatype = '' 648 422 ns_optional = True … … 650 424 def __init__(self): 651 425 self.current_level = {} 652 self.ns_variable = ''653 426 self.ns_datatype = "content" 654 427 self.ns_optional = True -
src/sas/sascalc/dataloader/readers/cansas_reader.py
rfcba29a r7f71637 14 14 15 15 import logging 16 import numpy 16 import numpy as np 17 17 import os 18 18 import sys … … 20 20 import inspect 21 21 # For saving individual sections of data 22 from sas.sascalc.dataloader.data_info import Data1D 23 from sas.sascalc.dataloader.data_info import Collimation 24 from sas.sascalc.dataloader.data_info import TransmissionSpectrum 25 from sas.sascalc.dataloader.data_info import Detector 26 from sas.sascalc.dataloader.data_info import Process 27 from sas.sascalc.dataloader.data_info import Aperture 22 from sas.sascalc.dataloader.data_info import Data1D, DataInfo, plottable_1D 23 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector, Process, Aperture 24 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable as combine_data 28 25 import sas.sascalc.dataloader.readers.xml_reader as xml_reader 29 26 from sas.sascalc.dataloader.readers.xml_reader import XMLreader 30 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants 27 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants, CurrentLevel 31 28 32 29 # The following 2 imports *ARE* used. Do not remove either. … … 34 31 from xml.dom.minidom import parseString 35 32 36 ## TODO: Refactor to load multiple <SASData> as separate Data1D objects37 ## TODO: Refactor to allow invalid XML, but give a useful warning when loaded38 39 _ZERO = 1e-1640 33 PREPROCESS = "xmlpreprocess" 41 34 ENCODING = "encoding" 42 35 RUN_NAME_DEFAULT = "None" 36 INVALID_SCHEMA_PATH_1_1 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd" 37 INVALID_SCHEMA_PATH_1_0 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd" 38 INVALID_XML = "\n\nThe loaded xml file, {0} does not fully meet the CanSAS v1.x specification. SasView loaded " + \ 39 "as much of the data as possible.\n\n" 43 40 HAS_CONVERTER = True 44 41 try: … … 52 49 ALLOW_ALL = True 53 50 54 # DO NOT REMOVE Called by outside packages:55 # sas.sasgui.perspectives.invariant.invariant_state56 # sas.sasgui.perspectives.fitting.pagestate57 def get_content(location, node):58 """59 Get the first instance of the content of a xpath location.60 61 :param location: xpath location62 :param node: node to start at63 64 :return: Element, or None65 """66 nodes = node.xpath(location,67 namespaces={'ns': CANSAS_NS.get("1.0").get("ns")})68 if len(nodes) > 0:69 return nodes[0]70 else:71 return None72 73 # DO NOT REMOVE Called by outside packages:74 # sas.sasgui.perspectives.fitting.pagestate75 def write_node(doc, parent, name, value, attr=None):76 """77 :param doc: document DOM78 :param parent: parent node79 :param name: tag of the element80 :param value: value of the child text node81 :param attr: attribute dictionary82 83 :return: True if something was appended, otherwise False84 """85 if attr is None:86 attr = {}87 if value is not None:88 node = doc.createElement(name)89 node.appendChild(doc.createTextNode(str(value)))90 for item in attr:91 node.setAttribute(item, attr[item])92 parent.appendChild(node)93 return True94 return False95 96 51 class Reader(XMLreader): 97 52 """ … … 101 56 The CanSAS reader requires PyXML 0.8.4 or later. 
102 57 """ 103 ## CanSAS version - defaults to version 1.058 ## CanSAS version - defaults to version 1.0 104 59 cansas_version = "1.0" 105 60 base_ns = "{cansas1d/1.0}" 61 cansas_defaults = None 62 type_name = "canSAS" 63 invalid = True 64 ## Log messages and errors 106 65 logging = None 107 errors = None 108 type_name = "canSAS" 66 errors = set() 67 ## Namespace hierarchy for current xml_file object 68 names = None 69 ns_list = None 70 ## Temporary storage location for loading multiple data sets in a single file 71 current_datainfo = None 72 current_dataset = None 73 current_data1d = None 74 data = None 75 ## List of data1D objects to be sent back to SasView 76 output = None 109 77 ## Wildcards 110 78 type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"] … … 114 82 allow_all = True 115 83 116 def __init__(self): 117 ## List of errors 118 self.errors = set() 84 def reset_state(self): 85 """ 86 Resets the class state to a base case when loading a new data file so previous 87 data files do not appear a second time 88 """ 89 self.current_datainfo = None 90 self.current_dataset = None 91 self.current_data1d = None 92 self.data = [] 93 self.process = Process() 94 self.transspectrum = TransmissionSpectrum() 95 self.aperture = Aperture() 96 self.collimation = Collimation() 97 self.detector = Detector() 98 self.names = [] 99 self.cansas_defaults = {} 100 self.output = [] 101 self.ns_list = None 119 102 self.logging = [] 120 103 self.encoding = None 104 105 def read(self, xml_file, schema_path="", invalid=True): 106 """ 107 Validate and read in an xml_file file in the canSAS format. 108 109 :param xml_file: A canSAS file path in proper XML format 110 :param schema_path: A file path to an XML schema to validate the xml_file against 111 """ 112 # For every file loaded, reset everything to a base state 113 self.reset_state() 114 self.invalid = invalid 115 # Check that the file exists 116 if os.path.isfile(xml_file): 117 basename, extension = os.path.splitext(os.path.basename(xml_file)) 118 # If the file type is not allowed, return nothing 119 if extension in self.ext or self.allow_all: 120 # Get the file location of 121 self.load_file_and_schema(xml_file, schema_path) 122 self.add_data_set() 123 # Try to load the file, but raise an error if unable to. 124 # Check the file matches the XML schema 125 try: 126 self.is_cansas(extension) 127 self.invalid = False 128 # Get each SASentry from XML file and add it to a list. 129 entry_list = self.xmlroot.xpath( 130 '/ns:SASroot/ns:SASentry', 131 namespaces={'ns': self.cansas_defaults.get("ns")}) 132 self.names.append("SASentry") 133 134 # Get all preprocessing events and encoding 135 self.set_processing_instructions() 136 137 # Parse each <SASentry> item 138 for entry in entry_list: 139 # Create a new DataInfo object for every <SASentry> 140 141 142 # Set the file name and then parse the entry. 
143 self.current_datainfo.filename = basename + extension 144 self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D" 145 self.current_datainfo.meta_data[PREPROCESS] = \ 146 self.processing_instructions 147 148 # Parse the XML SASentry 149 self._parse_entry(entry) 150 # Combine datasets with datainfo 151 self.add_data_set() 152 except RuntimeError: 153 # If the file does not match the schema, raise this error 154 invalid_xml = self.find_invalid_xml() 155 invalid_xml = INVALID_XML.format(basename + extension) + invalid_xml 156 self.errors.add(invalid_xml) 157 # Try again with an invalid CanSAS schema, that requires only a data set in each 158 base_name = xml_reader.__file__ 159 base_name = base_name.replace("\\", "/") 160 base = base_name.split("/sas/")[0] 161 if self.cansas_version == "1.1": 162 invalid_schema = INVALID_SCHEMA_PATH_1_1.format(base, self.cansas_defaults.get("schema")) 163 else: 164 invalid_schema = INVALID_SCHEMA_PATH_1_0.format(base, self.cansas_defaults.get("schema")) 165 self.set_schema(invalid_schema) 166 try: 167 if self.invalid: 168 if self.is_cansas(): 169 self.output = self.read(xml_file, invalid_schema, False) 170 else: 171 raise RuntimeError 172 else: 173 raise RuntimeError 174 except RuntimeError: 175 x = np.zeros(1) 176 y = np.zeros(1) 177 self.current_data1d = Data1D(x,y) 178 self.current_data1d.errors = self.errors 179 return [self.current_data1d] 180 else: 181 self.output.append("Not a valid file path.") 182 # Return a list of parsed entries that dataloader can manage 183 return self.output 184 185 def _parse_entry(self, dom): 186 """ 187 Parse a SASEntry - new recursive method for parsing the dom of 188 the CanSAS data format. This will allow multiple data files 189 and extra nodes to be read in simultaneously. 190 191 :param dom: dom object with a namespace base of names 192 """ 193 194 frm = inspect.stack()[1] 195 if not self._is_call_local(frm): 196 self.reset_state() 197 self.add_data_set() 198 self.names.append("SASentry") 199 self.parent_class = "SASentry" 200 self._check_for_empty_data() 201 self.base_ns = "{0}{1}{2}".format("{", \ 202 CANSAS_NS.get(self.cansas_version).get("ns"), "}") 203 tagname = '' 204 tagname_original = '' 205 206 # Go through each child in the parent element 207 for node in dom: 208 # Get the element name and set the current names level 209 tagname = node.tag.replace(self.base_ns, "") 210 tagname_original = tagname 211 # Skip this iteration when loading in save state information 212 if tagname == "fitting_plug_in" or tagname == "pr_inversion" or tagname == "invariant": 213 continue 214 215 # Get where to store content 216 self.names.append(tagname_original) 217 self.ns_list = CONSTANTS.iterate_namespace(self.names) 218 # If the element is a child element, recurse 219 if len(node.getchildren()) > 0: 220 self.parent_class = tagname_original 221 if tagname == 'SASdata': 222 self._initialize_new_data_set() 223 ## Recursion step to access data within the group 224 self._parse_entry(node) 225 self.add_intermediate() 226 else: 227 data_point, unit = self._get_node_value(node, tagname) 228 229 ## If this is a dataset, store the data appropriately 230 if tagname == 'Run': 231 self.current_datainfo.run.append(data_point) 232 elif tagname == 'Title': 233 self.current_datainfo.title = data_point 234 elif tagname == 'SASnote': 235 self.current_datainfo.notes.append(data_point) 236 237 ## I and Q Data 238 elif tagname == 'I': 239 self.current_dataset.yaxis("Intensity", unit) 240 self.current_dataset.y = np.append(self.current_dataset.y, 
data_point) 241 elif tagname == 'Idev': 242 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point) 243 elif tagname == 'Q': 244 self.current_dataset.xaxis("Q", unit) 245 self.current_dataset.x = np.append(self.current_dataset.x, data_point) 246 elif tagname == 'Qdev': 247 self.current_dataset.dx = np.append(self.current_dataset.dx, data_point) 248 elif tagname == 'dQw': 249 self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point) 250 elif tagname == 'dQl': 251 self.current_dataset.dxl = np.append(self.current_dataset.dxl, data_point) 252 elif tagname == 'Qmean': 253 pass 254 elif tagname == 'Shadowfactor': 255 pass 256 257 ## Sample Information 258 elif tagname == 'ID' and self.parent_class == 'SASsample': 259 self.current_datainfo.sample.ID = data_point 260 elif tagname == 'Title' and self.parent_class == 'SASsample': 261 self.current_datainfo.sample.name = data_point 262 elif tagname == 'thickness' and self.parent_class == 'SASsample': 263 self.current_datainfo.sample.thickness = data_point 264 self.current_datainfo.sample.thickness_unit = unit 265 elif tagname == 'transmission' and self.parent_class == 'SASsample': 266 self.current_datainfo.sample.transmission = data_point 267 elif tagname == 'temperature' and self.parent_class == 'SASsample': 268 self.current_datainfo.sample.temperature = data_point 269 self.current_datainfo.sample.temperature_unit = unit 270 elif tagname == 'details' and self.parent_class == 'SASsample': 271 self.current_datainfo.sample.details.append(data_point) 272 elif tagname == 'x' and self.parent_class == 'position': 273 self.current_datainfo.sample.position.x = data_point 274 self.current_datainfo.sample.position_unit = unit 275 elif tagname == 'y' and self.parent_class == 'position': 276 self.current_datainfo.sample.position.y = data_point 277 self.current_datainfo.sample.position_unit = unit 278 elif tagname == 'z' and self.parent_class == 'position': 279 self.current_datainfo.sample.position.z = data_point 280 self.current_datainfo.sample.position_unit = unit 281 elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASsample' in self.names: 282 self.current_datainfo.sample.orientation.x = data_point 283 self.current_datainfo.sample.orientation_unit = unit 284 elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASsample' in self.names: 285 self.current_datainfo.sample.orientation.y = data_point 286 self.current_datainfo.sample.orientation_unit = unit 287 elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASsample' in self.names: 288 self.current_datainfo.sample.orientation.z = data_point 289 self.current_datainfo.sample.orientation_unit = unit 290 291 ## Instrumental Information 292 elif tagname == 'name' and self.parent_class == 'SASinstrument': 293 self.current_datainfo.instrument = data_point 294 ## Detector Information 295 elif tagname == 'name' and self.parent_class == 'SASdetector': 296 self.detector.name = data_point 297 elif tagname == 'SDD' and self.parent_class == 'SASdetector': 298 self.detector.distance = data_point 299 self.detector.distance_unit = unit 300 elif tagname == 'slit_length' and self.parent_class == 'SASdetector': 301 self.detector.slit_length = data_point 302 self.detector.slit_length_unit = unit 303 elif tagname == 'x' and self.parent_class == 'offset': 304 self.detector.offset.x = data_point 305 self.detector.offset_unit = unit 306 elif tagname == 'y' and self.parent_class == 'offset': 307 self.detector.offset.y = data_point 308 
self.detector.offset_unit = unit 309 elif tagname == 'z' and self.parent_class == 'offset': 310 self.detector.offset.z = data_point 311 self.detector.offset_unit = unit 312 elif tagname == 'x' and self.parent_class == 'beam_center': 313 self.detector.beam_center.x = data_point 314 self.detector.beam_center_unit = unit 315 elif tagname == 'y' and self.parent_class == 'beam_center': 316 self.detector.beam_center.y = data_point 317 self.detector.beam_center_unit = unit 318 elif tagname == 'z' and self.parent_class == 'beam_center': 319 self.detector.beam_center.z = data_point 320 self.detector.beam_center_unit = unit 321 elif tagname == 'x' and self.parent_class == 'pixel_size': 322 self.detector.pixel_size.x = data_point 323 self.detector.pixel_size_unit = unit 324 elif tagname == 'y' and self.parent_class == 'pixel_size': 325 self.detector.pixel_size.y = data_point 326 self.detector.pixel_size_unit = unit 327 elif tagname == 'z' and self.parent_class == 'pixel_size': 328 self.detector.pixel_size.z = data_point 329 self.detector.pixel_size_unit = unit 330 elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 331 self.detector.orientation.x = data_point 332 self.detector.orientation_unit = unit 333 elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 334 self.detector.orientation.y = data_point 335 self.detector.orientation_unit = unit 336 elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 337 self.detector.orientation.z = data_point 338 self.detector.orientation_unit = unit 339 ## Collimation and Aperture 340 elif tagname == 'length' and self.parent_class == 'SAScollimation': 341 self.collimation.length = data_point 342 self.collimation.length_unit = unit 343 elif tagname == 'name' and self.parent_class == 'SAScollimation': 344 self.collimation.name = data_point 345 elif tagname == 'distance' and self.parent_class == 'aperture': 346 self.aperture.distance = data_point 347 self.aperture.distance_unit = unit 348 elif tagname == 'x' and self.parent_class == 'size': 349 self.aperture.size.x = data_point 350 self.collimation.size_unit = unit 351 elif tagname == 'y' and self.parent_class == 'size': 352 self.aperture.size.y = data_point 353 self.collimation.size_unit = unit 354 elif tagname == 'z' and self.parent_class == 'size': 355 self.aperture.size.z = data_point 356 self.collimation.size_unit = unit 357 358 ## Process Information 359 elif tagname == 'name' and self.parent_class == 'SASprocess': 360 self.process.name = data_point 361 elif tagname == 'description' and self.parent_class == 'SASprocess': 362 self.process.description = data_point 363 elif tagname == 'date' and self.parent_class == 'SASprocess': 364 try: 365 self.process.date = datetime.datetime.fromtimestamp(data_point) 366 except: 367 self.process.date = data_point 368 elif tagname == 'SASprocessnote': 369 self.process.notes.append(data_point) 370 elif tagname == 'term' and self.parent_class == 'SASprocess': 371 self.process.term.append(data_point) 372 373 ## Transmission Spectrum 374 elif tagname == 'T' and self.parent_class == 'Tdata': 375 self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point) 376 self.transspectrum.transmission_unit = unit 377 elif tagname == 'Tdev' and self.parent_class == 'Tdata': 378 self.transspectrum.transmission_deviation = np.append(self.transspectrum.transmission_deviation, data_point) 379 self.transspectrum.transmission_deviation_unit = unit 
380 elif tagname == 'Lambda' and self.parent_class == 'Tdata': 381 self.transspectrum.wavelength = np.append(self.transspectrum.wavelength, data_point) 382 self.transspectrum.wavelength_unit = unit 383 384 ## Source Information 385 elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'): 386 self.current_datainfo.source.wavelength = data_point 387 self.current_datainfo.source.wavelength_unit = unit 388 elif tagname == 'wavelength_min' and self.parent_class == 'SASsource': 389 self.current_datainfo.source.wavelength_min = data_point 390 self.current_datainfo.source.wavelength_min_unit = unit 391 elif tagname == 'wavelength_max' and self.parent_class == 'SASsource': 392 self.current_datainfo.source.wavelength_max = data_point 393 self.current_datainfo.source.wavelength_max_unit = unit 394 elif tagname == 'wavelength_spread' and self.parent_class == 'SASsource': 395 self.current_datainfo.source.wavelength_spread = data_point 396 self.current_datainfo.source.wavelength_spread_unit = unit 397 elif tagname == 'x' and self.parent_class == 'beam_size': 398 self.current_datainfo.source.beam_size.x = data_point 399 self.current_datainfo.source.beam_size_unit = unit 400 elif tagname == 'y' and self.parent_class == 'beam_size': 401 self.current_datainfo.source.beam_size.y = data_point 402 self.current_datainfo.source.beam_size_unit = unit 403 elif tagname == 'z' and self.parent_class == 'pixel_size': 404 self.current_datainfo.source.data_point.z = data_point 405 self.current_datainfo.source.beam_size_unit = unit 406 elif tagname == 'radiation' and self.parent_class == 'SASsource': 407 self.current_datainfo.source.radiation = data_point 408 elif tagname == 'beam_shape' and self.parent_class == 'SASsource': 409 self.current_datainfo.source.beam_shape = data_point 410 411 ## Everything else goes in meta_data 412 else: 413 new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname) 414 self.current_datainfo.meta_data[new_key] = data_point 415 416 self.names.remove(tagname_original) 417 length = 0 418 if len(self.names) > 1: 419 length = len(self.names) - 1 420 self.parent_class = self.names[length] 421 if not self._is_call_local(frm): 422 self.add_data_set() 423 empty = None 424 if self.output[0].dx is not None: 425 self.output[0].dxl = np.empty(0) 426 self.output[0].dxw = np.empty(0) 427 else: 428 self.output[0].dx = np.empty(0) 429 return self.output[0], empty 430 431 432 def _is_call_local(self, frm=""): 433 """ 434 435 :return: 436 """ 437 if frm == "": 438 frm = inspect.stack()[1] 439 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 440 mod_name = mod_name.replace(".py", "") 441 mod = mod_name.split("sas/") 442 mod_name = mod[1] 443 if mod_name != "sascalc/dataloader/readers/cansas_reader": 444 return False 445 return True 121 446 122 447 def is_cansas(self, ext="xml"): … … 134 459 if ext == "svs": 135 460 return True 136 r eturn False461 raise RuntimeError 137 462 138 463 def load_file_and_schema(self, xml_file, schema_path=""): 139 464 """ 140 Loads the file and associates a schema, if a known schemaexists465 Loads the file and associates a schema, if a schema is passed in or if one already exists 141 466 142 467 :param xml_file: The xml file path sent to Reader.read 468 :param schema_path: The path to a schema associated with the xml_file, or find one based on the file 143 469 """ 144 470 base_name = xml_reader.__file__ … … 151 477 152 478 # Generic values for the cansas file based on the version 153 cansas_defaults = 
CANSAS_NS.get(self.cansas_version, "1.0")479 self.cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0") 154 480 if schema_path == "": 155 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \156 (base, cansas_defaults.get("schema")).replace("\\", "/")481 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \ 482 (base, self.cansas_defaults.get("schema")).replace("\\", "/") 157 483 158 484 # Link a schema to the XML file. 159 485 self.set_schema(schema_path) 160 return cansas_defaults 161 162 ## TODO: Test loading invalid CanSAS XML files and see if this works 163 ## TODO: Once works, try adding a warning that the data is invalid 164 def read(self, xml_file, schema_path=""): 165 """ 166 Validate and read in an xml_file file in the canSAS format. 167 168 :param xml_file: A canSAS file path in proper XML format 169 """ 170 # output - Final list of Data1D objects 171 output = [] 172 # ns - Namespace hierarchy for current xml_file object 173 ns_list = [] 174 175 # Check that the file exists 176 if os.path.isfile(xml_file): 177 basename = os.path.basename(xml_file) 178 _, extension = os.path.splitext(basename) 179 # If the file type is not allowed, return nothing 180 if extension in self.ext or self.allow_all: 181 # Get the file location of 182 cansas_defaults = self.load_file_and_schema(xml_file, schema_path) 183 184 # Try to load the file, but raise an error if unable to. 185 # Check the file matches the XML schema 186 try: 187 if self.is_cansas(extension): 188 # Get each SASentry from XML file and add it to a list. 189 entry_list = self.xmlroot.xpath( 190 '/ns:SASroot/ns:SASentry', 191 namespaces={'ns': cansas_defaults.get("ns")}) 192 ns_list.append("SASentry") 193 194 # If multiple files, modify the name for each is unique 195 increment = 0 196 # Parse each SASentry item 197 for entry in entry_list: 198 # Define a new Data1D object with zeroes for 199 # x_vals and y_vals 200 data1d = Data1D(numpy.empty(0), numpy.empty(0), 201 numpy.empty(0), numpy.empty(0)) 202 data1d.dxl = numpy.empty(0) 203 data1d.dxw = numpy.empty(0) 204 205 # If more than one SASentry, increment each in order 206 name = basename 207 if len(entry_list) - 1 > 0: 208 name += "_{0}".format(increment) 209 increment += 1 210 211 # Set the Data1D name and then parse the entry. 212 # The entry is appended to a list of entry values 213 data1d.filename = name 214 data1d.meta_data["loader"] = "CanSAS 1D" 215 216 # Get all preprocessing events and encoding 217 self.set_processing_instructions() 218 data1d.meta_data[PREPROCESS] = \ 219 self.processing_instructions 220 221 # Parse the XML file 222 return_value, extras = \ 223 self._parse_entry(entry, ns_list, data1d) 224 del extras[:] 225 226 return_value = self._final_cleanup(return_value) 227 output.append(return_value) 228 else: 229 raise RuntimeError, "Invalid XML at: {0}".format(\ 230 self.find_invalid_xml()) 231 except: 232 # If the file does not match the schema, raise this error 233 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid.xsd" 234 invalid_xml = self.find_invalid_xml() 235 invalid_xml = "\n\nThe loaded xml file does not fully meet the CanSAS v1.x specification. 
SasView " + \ 236 "loaded as much of the data as possible.\n\n" + invalid_xml 237 self.errors.add(invalid_xml) 238 self.set_schema(schema_path) 239 if self.is_cansas(): 240 output = self.read(xml_file, schema_path) 241 else: 242 raise RuntimeError, "%s cannot be read" % xml_file 243 return output 244 # Return a list of parsed entries that dataloader can manage 245 return None 246 247 def _final_cleanup(self, data1d): 486 487 def add_data_set(self): 488 """ 489 Adds the current_dataset to the list of outputs after preforming final processing on the data and then calls a 490 private method to generate a new data set. 491 492 :param key: NeXus group name for current tree level 493 """ 494 495 if self.current_datainfo and self.current_dataset: 496 self._final_cleanup() 497 self.data = [] 498 self.current_datainfo = DataInfo() 499 500 def _initialize_new_data_set(self, parent_list=None): 501 """ 502 A private class method to generate a new 1D data object. 503 Outside methods should call add_data_set() to be sure any existing data is stored properly. 504 505 :param parent_list: List of names of parent elements 506 """ 507 508 if parent_list is None: 509 parent_list = [] 510 x = np.array(0) 511 y = np.array(0) 512 self.current_dataset = plottable_1D(x, y) 513 514 def add_intermediate(self): 515 """ 516 This method stores any intermediate objects within the final data set after fully reading the set. 517 518 :param parent: The NXclass name for the h5py Group object that just finished being processed 519 """ 520 521 if self.parent_class == 'SASprocess': 522 self.current_datainfo.process.append(self.process) 523 self.process = Process() 524 elif self.parent_class == 'SASdetector': 525 self.current_datainfo.detector.append(self.detector) 526 self.detector = Detector() 527 elif self.parent_class == 'SAStransmission_spectrum': 528 self.current_datainfo.trans_spectrum.append(self.transspectrum) 529 self.transspectrum = TransmissionSpectrum() 530 elif self.parent_class == 'SAScollimation': 531 self.current_datainfo.collimation.append(self.collimation) 532 self.collimation = Collimation() 533 elif self.parent_class == 'SASaperture': 534 self.collimation.aperture.append(self.aperture) 535 self.aperture = Aperture() 536 elif self.parent_class == 'SASdata': 537 self._check_for_empty_resolution() 538 self.data.append(self.current_dataset) 539 540 def _final_cleanup(self): 248 541 """ 249 542 Final cleanup of the Data1D object to be sure it has all the 250 543 appropriate information needed for perspectives 251 252 :param data1d: Data1D object that has been populated 253 """ 254 # Final cleanup 255 # Remove empty nodes, verify array sizes are correct 544 """ 545 546 ## Append errors to dataset and reset class errors 547 self.current_datainfo.errors = set() 256 548 for error in self.errors: 257 data1d.errors.append(error)549 self.current_datainfo.errors.add(error) 258 550 self.errors.clear() 259 numpy.trim_zeros(data1d.x) 260 numpy.trim_zeros(data1d.y) 261 numpy.trim_zeros(data1d.dy) 262 size_dx = data1d.dx.size 263 size_dxl = data1d.dxl.size 264 size_dxw = data1d.dxw.size 265 if data1d._xunit != data1d.x_unit: 266 data1d.x_unit = data1d._xunit 267 if data1d._yunit != data1d.y_unit: 268 data1d.y_unit = data1d._yunit 269 if size_dxl == 0 and size_dxw == 0: 270 data1d.dxl = None 271 data1d.dxw = None 272 numpy.trim_zeros(data1d.dx) 273 elif size_dx == 0: 274 data1d.dx = None 275 size_dx = size_dxl 276 numpy.trim_zeros(data1d.dxl) 277 numpy.trim_zeros(data1d.dxw) 278 return data1d 551 552 ## Combine all plottables 
with datainfo and append each to output 553 ## Type cast data arrays to float64 and find min/max as appropriate 554 for dataset in self.data: 555 if dataset.x is not None: 556 dataset.x = np.delete(dataset.x, [0]) 557 dataset.x = dataset.x.astype(np.float64) 558 dataset.xmin = np.min(dataset.x) 559 dataset.xmax = np.max(dataset.x) 560 if dataset.y is not None: 561 dataset.y = np.delete(dataset.y, [0]) 562 dataset.y = dataset.y.astype(np.float64) 563 dataset.ymin = np.min(dataset.y) 564 dataset.ymax = np.max(dataset.y) 565 if dataset.dx is not None: 566 dataset.dx = np.delete(dataset.dx, [0]) 567 dataset.dx = dataset.dx.astype(np.float64) 568 if dataset.dxl is not None: 569 dataset.dxl = np.delete(dataset.dxl, [0]) 570 dataset.dxl = dataset.dxl.astype(np.float64) 571 if dataset.dxw is not None: 572 dataset.dxw = np.delete(dataset.dxw, [0]) 573 dataset.dxw = dataset.dxw.astype(np.float64) 574 if dataset.dy is not None: 575 dataset.dy = np.delete(dataset.dy, [0]) 576 dataset.dy = dataset.dy.astype(np.float64) 577 np.trim_zeros(dataset.x) 578 np.trim_zeros(dataset.y) 579 np.trim_zeros(dataset.dy) 580 final_dataset = combine_data(dataset, self.current_datainfo) 581 self.output.append(final_dataset) 279 582 280 583 def _create_unique_key(self, dictionary, name, numb=0): 281 584 """ 282 585 Create a unique key value for any dictionary to prevent overwriting 283 Recurse suntil a unique key value is found.586 Recurse until a unique key value is found. 284 587 285 588 :param dictionary: A dictionary with any number of entries … … 294 597 return name 295 598 296 def _unit_conversion(self, node, new_current_level, data1d, \ 297 tagname, node_value): 599 def _get_node_value(self, node, tagname): 600 """ 601 Get the value of a node and any applicable units 602 603 :param node: The XML node to get the value of 604 :param tagname: The tagname of the node 605 """ 606 #Get the text from the node and convert all whitespace to spaces 607 units = '' 608 node_value = node.text 609 if node_value is not None: 610 node_value = ' '.join(node_value.split()) 611 else: 612 node_value = "" 613 614 # If the value is a float, compile with units. 615 if self.ns_list.ns_datatype == "float": 616 # If an empty value is given, set as zero. 
617 if node_value is None or node_value.isspace() \ 618 or node_value.lower() == "nan": 619 node_value = "0.0" 620 #Convert the value to the base units 621 node_value, units = self._unit_conversion(node, tagname, node_value) 622 623 # If the value is a timestamp, convert to a datetime object 624 elif self.ns_list.ns_datatype == "timestamp": 625 if node_value is None or node_value.isspace(): 626 pass 627 else: 628 try: 629 node_value = \ 630 datetime.datetime.fromtimestamp(node_value) 631 except ValueError: 632 node_value = None 633 return node_value, units 634 635 def _unit_conversion(self, node, tagname, node_value): 298 636 """ 299 637 A unit converter method used to convert the data included in the file 300 638 to the default units listed in data_info 301 639 302 :param new_current_level: cansas_constants level as returned by 303 iterate_namespace 304 :param attr: The attributes of the node 305 :param data1d: Where the values will be saved 640 :param node: XML node 641 :param tagname: name of the node 306 642 :param node_value: The value of the current dom node 307 643 """ … … 310 646 err_msg = None 311 647 default_unit = None 312 if 'unit' in attr and new_current_level.get('unit') is not None:648 if 'unit' in attr and attr.get('unit') is not None and not self.ns_list.ns_optional: 313 649 try: 314 650 local_unit = attr['unit'] 315 if isinstance(node_value, float) is False: 316 exec("node_value = float({0})".format(node_value)) 317 unitname = new_current_level.get("unit") 318 exec "default_unit = data1d.{0}".format(unitname) 319 if local_unit is not None and default_unit is not None and \ 320 local_unit.lower() != default_unit.lower() \ 651 if not isinstance(node_value, float): 652 node_value = float(node_value) 653 unitname = self.ns_list.current_level.get("unit", "") 654 if "SASdetector" in self.names: 655 save_in = "detector" 656 elif "aperture" in self.names: 657 save_in = "aperture" 658 elif "SAScollimation" in self.names: 659 save_in = "collimation" 660 elif "SAStransmission_spectrum" in self.names: 661 save_in = "transspectrum" 662 elif "SASdata" in self.names: 663 x = np.zeros(1) 664 y = np.zeros(1) 665 self.current_data1d = Data1D(x, y) 666 save_in = "current_data1d" 667 elif "SASsource" in self.names: 668 save_in = "current_datainfo.source" 669 elif "SASsample" in self.names: 670 save_in = "current_datainfo.sample" 671 elif "SASprocess" in self.names: 672 save_in = "process" 673 else: 674 save_in = "current_datainfo" 675 exec "default_unit = self.{0}.{1}".format(save_in, unitname) 676 if local_unit and default_unit and local_unit.lower() != default_unit.lower() \ 321 677 and local_unit.lower() != "none": 322 678 if HAS_CONVERTER == True: … … 345 701 if err_msg: 346 702 self.errors.add(err_msg) 347 node_value = "float({0})".format(node_value)348 703 return node_value, value_unit 349 704 350 def _check_for_empty_data(self , data1d):705 def _check_for_empty_data(self): 351 706 """ 352 707 Creates an empty data set if no data is passed to the reader … … 354 709 :param data1d: presumably a Data1D object 355 710 """ 356 if data1d == None: 357 self.errors = set() 358 x_vals = numpy.empty(0) 359 y_vals = numpy.empty(0) 360 dx_vals = numpy.empty(0) 361 dy_vals = numpy.empty(0) 362 dxl = numpy.empty(0) 363 dxw = numpy.empty(0) 364 data1d = Data1D(x_vals, y_vals, dx_vals, dy_vals) 365 data1d.dxl = dxl 366 data1d.dxw = dxw 367 return data1d 368 369 def _handle_special_cases(self, tagname, data1d, children): 370 """ 371 Handle cases where the data type in Data1D is a dictionary or list 372 
373 :param tagname: XML tagname in use 374 :param data1d: The original Data1D object 375 :param children: Child nodes of node 376 :param node: existing node with tag name 'tagname' 377 """ 378 if tagname == "SASdetector": 379 data1d = Detector() 380 elif tagname == "SAScollimation": 381 data1d = Collimation() 382 elif tagname == "SAStransmission_spectrum": 383 data1d = TransmissionSpectrum() 384 elif tagname == "SASprocess": 385 data1d = Process() 386 for child in children: 387 if child.tag.replace(self.base_ns, "") == "term": 388 term_attr = {} 389 for attr in child.keys(): 390 term_attr[attr] = \ 391 ' '.join(child.get(attr).split()) 392 if child.text is not None: 393 term_attr['value'] = \ 394 ' '.join(child.text.split()) 395 data1d.term.append(term_attr) 396 elif tagname == "aperture": 397 data1d = Aperture() 398 if tagname == "Idata" and children is not None: 399 data1d = self._check_for_empty_resolution(data1d, children) 400 return data1d 401 402 def _check_for_empty_resolution(self, data1d, children): 711 if self.current_dataset == None: 712 x_vals = np.empty(0) 713 y_vals = np.empty(0) 714 dx_vals = np.empty(0) 715 dy_vals = np.empty(0) 716 dxl = np.empty(0) 717 dxw = np.empty(0) 718 self.current_dataset = plottable_1D(x_vals, y_vals, dx_vals, dy_vals) 719 self.current_dataset.dxl = dxl 720 self.current_dataset.dxw = dxw 721 722 def _check_for_empty_resolution(self): 403 723 """ 404 724 A method to check all resolution data sets are the same size as I and Q … … 408 728 dq_exists = False 409 729 di_exists = False 410 for child in children: 411 tag = child.tag.replace(self.base_ns, "") 412 if tag == "dQl": 413 dql_exists = True 414 if tag == "dQw": 415 dqw_exists = True 416 if tag == "Qdev": 417 dq_exists = True 418 if tag == "Idev": 419 di_exists = True 420 if dqw_exists and dql_exists == False: 421 data1d.dxl = numpy.append(data1d.dxl, 0.0) 422 elif dql_exists and dqw_exists == False: 423 data1d.dxw = numpy.append(data1d.dxw, 0.0) 424 elif dql_exists == False and dqw_exists == False \ 425 and dq_exists == False: 426 data1d.dx = numpy.append(data1d.dx, 0.0) 427 if di_exists == False: 428 data1d.dy = numpy.append(data1d.dy, 0.0) 429 return data1d 430 431 def _restore_original_case(self, 432 tagname_original, 433 tagname, 434 save_data1d, 435 data1d): 436 """ 437 Save the special case data to the appropriate location and restore 438 the original Data1D object 439 440 :param tagname_original: Unmodified tagname for the node 441 :param tagname: modified tagname for the node 442 :param save_data1d: The original Data1D object 443 :param data1d: If a special case was handled, an object of that type 444 """ 445 if tagname_original == "SASdetector": 446 save_data1d.detector.append(data1d) 447 elif tagname_original == "SAScollimation": 448 save_data1d.collimation.append(data1d) 449 elif tagname == "SAStransmission_spectrum": 450 save_data1d.trans_spectrum.append(data1d) 451 elif tagname_original == "SASprocess": 452 save_data1d.process.append(data1d) 453 elif tagname_original == "aperture": 454 save_data1d.aperture.append(data1d) 455 else: 456 save_data1d = data1d 457 return save_data1d 458 459 def _handle_attributes(self, node, data1d, cs_values, tagname): 460 """ 461 Process all of the attributes for a node 462 """ 463 attr = node.attrib 464 if attr is not None: 465 for key in node.keys(): 466 try: 467 node_value, unit = self._get_node_value(node, cs_values, \ 468 data1d, tagname) 469 cansas_attrib = \ 470 cs_values.current_level.get("attributes").get(key) 471 attrib_variable = 
cansas_attrib.get("variable") 472 if key == 'unit' and unit != '': 473 attrib_value = unit 474 else: 475 attrib_value = node.attrib[key] 476 store_attr = attrib_variable.format("data1d", 477 attrib_value, 478 key, 479 node_value) 480 exec store_attr 481 except AttributeError: 482 pass 483 return data1d 484 485 def _get_node_value(self, node, cs_values, data1d, tagname): 486 """ 487 Get the value of a node and any applicable units 488 489 :param node: The XML node to get the value of 490 :param cs_values: A CansasConstants.CurrentLevel object 491 :param attr: The node attributes 492 :param dataid: The working object to be modified 493 :param tagname: The tagname of the node 494 """ 495 #Get the text from the node and convert all whitespace to spaces 496 units = '' 497 node_value = node.text 498 if node_value == "": 499 node_value = None 500 if node_value is not None: 501 node_value = ' '.join(node_value.split()) 502 503 # If the value is a float, compile with units. 504 if cs_values.ns_datatype == "float": 505 # If an empty value is given, set as zero. 506 if node_value is None or node_value.isspace() \ 507 or node_value.lower() == "nan": 508 node_value = "0.0" 509 #Convert the value to the base units 510 node_value, units = self._unit_conversion(node, \ 511 cs_values.current_level, data1d, tagname, node_value) 512 513 # If the value is a timestamp, convert to a datetime object 514 elif cs_values.ns_datatype == "timestamp": 515 if node_value is None or node_value.isspace(): 516 pass 517 else: 518 try: 519 node_value = \ 520 datetime.datetime.fromtimestamp(node_value) 521 except ValueError: 522 node_value = None 523 return node_value, units 524 525 def _parse_entry(self, dom, names=None, data1d=None, extras=None): 526 """ 527 Parse a SASEntry - new recursive method for parsing the dom of 528 the CanSAS data format. This will allow multiple data files 529 and extra nodes to be read in simultaneously. 530 531 :param dom: dom object with a namespace base of names 532 :param names: A list of element names that lead up to the dom object 533 :param data1d: The data1d object that will be modified 534 :param extras: Any values that should go into meta_data when data1d 535 is not a Data1D object 536 """ 537 538 if extras is None: 539 extras = [] 540 if names is None or names == []: 541 names = ["SASentry"] 542 543 data1d = self._check_for_empty_data(data1d) 544 545 self.base_ns = "{0}{1}{2}".format("{", \ 546 CANSAS_NS.get(self.cansas_version).get("ns"), "}") 547 tagname = '' 548 tagname_original = '' 549 550 # Go through each child in the parent element 551 for node in dom: 552 try: 553 # Get the element name and set the current names level 554 tagname = node.tag.replace(self.base_ns, "") 555 tagname_original = tagname 556 if tagname == "fitting_plug_in" or tagname == "pr_inversion" or\ 557 tagname == "invariant": 558 continue 559 names.append(tagname) 560 children = node.getchildren() 561 if len(children) == 0: 562 children = None 563 save_data1d = data1d 564 565 # Look for special cases 566 data1d = self._handle_special_cases(tagname, data1d, children) 567 568 # Get where to store content 569 cs_values = CONSTANTS.iterate_namespace(names) 570 # If the element is a child element, recurse 571 if children is not None: 572 # Returned value is new Data1D object with all previous and 573 # new values in it. 
574 data1d, extras = self._parse_entry(node, 575 names, data1d, extras) 576 577 #Get the information from the node 578 node_value, _ = self._get_node_value(node, cs_values, \ 579 data1d, tagname) 580 581 # If appending to a dictionary (meta_data | run_name) 582 # make sure the key is unique 583 if cs_values.ns_variable == "{0}.meta_data[\"{2}\"] = \"{1}\"": 584 # If we are within a Process, Detector, Collimation or 585 # Aperture instance, pull out old data1d 586 tagname = self._create_unique_key(data1d.meta_data, \ 587 tagname, 0) 588 if isinstance(data1d, Data1D) == False: 589 store_me = cs_values.ns_variable.format("data1d", \ 590 node_value, tagname) 591 extras.append(store_me) 592 cs_values.ns_variable = None 593 if cs_values.ns_variable == "{0}.run_name[\"{2}\"] = \"{1}\"": 594 tagname = self._create_unique_key(data1d.run_name, \ 595 tagname, 0) 596 597 # Check for Data1D object and any extra commands to save 598 if isinstance(data1d, Data1D): 599 for item in extras: 600 exec item 601 # Don't bother saving empty information unless it is a float 602 if cs_values.ns_variable is not None and \ 603 node_value is not None and \ 604 node_value.isspace() == False: 605 # Format a string and then execute it. 606 store_me = cs_values.ns_variable.format("data1d", \ 607 node_value, tagname) 608 exec store_me 609 # Get attributes and process them 610 data1d = self._handle_attributes(node, data1d, cs_values, \ 611 tagname) 612 613 except TypeError: 614 pass 615 except Exception as excep: 616 exc_type, exc_obj, exc_tb = sys.exc_info() 617 fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] 618 print(excep, exc_type, fname, exc_tb.tb_lineno, \ 619 tagname, exc_obj) 620 finally: 621 # Save special cases in original data1d object 622 # then restore the data1d 623 save_data1d = self._restore_original_case(tagname_original, \ 624 tagname, save_data1d, data1d) 625 if tagname_original == "fitting_plug_in" or \ 626 tagname_original == "invariant" or \ 627 tagname_original == "pr_inversion": 628 pass 629 else: 630 data1d = save_data1d 631 # Remove tagname from names to restore original base 632 names.remove(tagname_original) 633 return data1d, extras 634 635 def _get_pi_string(self): 636 """ 637 Creates the processing instructions header for writing to file 638 """ 639 pis = self.return_processing_instructions() 640 if len(pis) > 0: 641 pi_tree = self.create_tree(pis[0]) 642 i = 1 643 for i in range(1, len(pis) - 1): 644 pi_tree = self.append(pis[i], pi_tree) 645 pi_string = self.to_string(pi_tree) 646 else: 647 pi_string = "" 648 return pi_string 649 650 def _create_main_node(self): 651 """ 652 Creates the primary xml header used when writing to file 653 """ 654 xsi = "http://www.w3.org/2001/XMLSchema-instance" 655 version = self.cansas_version 656 n_s = CANSAS_NS.get(version).get("ns") 657 if version == "1.1": 658 url = "http://www.cansas.org/formats/1.1/" 659 else: 660 url = "http://svn.smallangles.net/svn/canSAS/1dwg/trunk/" 661 schema_location = "{0} {1}cansas1d.xsd".format(n_s, url) 662 attrib = {"{" + xsi + "}schemaLocation" : schema_location, 663 "version" : version} 664 nsmap = {'xsi' : xsi, None: n_s} 665 666 main_node = self.create_element("{" + n_s + "}SASroot", 667 attrib=attrib, nsmap=nsmap) 668 return main_node 669 670 def _write_run_names(self, datainfo, entry_node): 671 """ 672 Writes the run names to the XML file 673 674 :param datainfo: The Data1D object the information is coming from 675 :param entry_node: lxml node ElementTree object to be appended to 676 """ 677 if 
datainfo.run == None or datainfo.run == []: 678 datainfo.run.append(RUN_NAME_DEFAULT) 679 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 680 for item in datainfo.run: 681 runname = {} 682 if item in datainfo.run_name and \ 683 len(str(datainfo.run_name[item])) > 1: 684 runname = {'name': datainfo.run_name[item]} 685 self.write_node(entry_node, "Run", item, runname) 686 687 def _write_data(self, datainfo, entry_node): 688 """ 689 Writes the I and Q data to the XML file 690 691 :param datainfo: The Data1D object the information is coming from 692 :param entry_node: lxml node ElementTree object to be appended to 693 """ 694 node = self.create_element("SASdata") 695 self.append(node, entry_node) 696 697 for i in range(len(datainfo.x)): 698 point = self.create_element("Idata") 699 node.append(point) 700 self.write_node(point, "Q", datainfo.x[i], 701 {'unit': datainfo.x_unit}) 702 if len(datainfo.y) >= i: 703 self.write_node(point, "I", datainfo.y[i], 704 {'unit': datainfo.y_unit}) 705 if datainfo.dy != None and len(datainfo.dy) > i: 706 self.write_node(point, "Idev", datainfo.dy[i], 707 {'unit': datainfo.y_unit}) 708 if datainfo.dx != None and len(datainfo.dx) > i: 709 self.write_node(point, "Qdev", datainfo.dx[i], 710 {'unit': datainfo.x_unit}) 711 if datainfo.dxw != None and len(datainfo.dxw) > i: 712 self.write_node(point, "dQw", datainfo.dxw[i], 713 {'unit': datainfo.x_unit}) 714 if datainfo.dxl != None and len(datainfo.dxl) > i: 715 self.write_node(point, "dQl", datainfo.dxl[i], 716 {'unit': datainfo.x_unit}) 717 718 def _write_trans_spectrum(self, datainfo, entry_node): 719 """ 720 Writes the transmission spectrum data to the XML file 721 722 :param datainfo: The Data1D object the information is coming from 723 :param entry_node: lxml node ElementTree object to be appended to 724 """ 725 for i in range(len(datainfo.trans_spectrum)): 726 spectrum = datainfo.trans_spectrum[i] 727 node = self.create_element("SAStransmission_spectrum", 728 {"name" : spectrum.name}) 729 self.append(node, entry_node) 730 if isinstance(spectrum.timestamp, datetime.datetime): 731 node.setAttribute("timestamp", spectrum.timestamp) 732 for i in range(len(spectrum.wavelength)): 733 point = self.create_element("Tdata") 734 node.append(point) 735 self.write_node(point, "Lambda", spectrum.wavelength[i], 736 {'unit': spectrum.wavelength_unit}) 737 self.write_node(point, "T", spectrum.transmission[i], 738 {'unit': spectrum.transmission_unit}) 739 if spectrum.transmission_deviation != None \ 740 and len(spectrum.transmission_deviation) >= i: 741 self.write_node(point, "Tdev", 742 spectrum.transmission_deviation[i], 743 {'unit': 744 spectrum.transmission_deviation_unit}) 745 746 def _write_sample_info(self, datainfo, entry_node): 747 """ 748 Writes the sample information to the XML file 749 750 :param datainfo: The Data1D object the information is coming from 751 :param entry_node: lxml node ElementTree object to be appended to 752 """ 753 sample = self.create_element("SASsample") 754 if datainfo.sample.name is not None: 755 self.write_attribute(sample, "name", 756 str(datainfo.sample.name)) 757 self.append(sample, entry_node) 758 self.write_node(sample, "ID", str(datainfo.sample.ID)) 759 self.write_node(sample, "thickness", datainfo.sample.thickness, 760 {"unit": datainfo.sample.thickness_unit}) 761 self.write_node(sample, "transmission", datainfo.sample.transmission) 762 self.write_node(sample, "temperature", datainfo.sample.temperature, 763 {"unit": datainfo.sample.temperature_unit}) 764 765 pos = 
self.create_element("position") 766 written = self.write_node(pos, 767 "x", 768 datainfo.sample.position.x, 769 {"unit": datainfo.sample.position_unit}) 770 written = written | self.write_node( \ 771 pos, "y", datainfo.sample.position.y, 772 {"unit": datainfo.sample.position_unit}) 773 written = written | self.write_node( \ 774 pos, "z", datainfo.sample.position.z, 775 {"unit": datainfo.sample.position_unit}) 776 if written == True: 777 self.append(pos, sample) 778 779 ori = self.create_element("orientation") 780 written = self.write_node(ori, "roll", 781 datainfo.sample.orientation.x, 782 {"unit": datainfo.sample.orientation_unit}) 783 written = written | self.write_node( \ 784 ori, "pitch", datainfo.sample.orientation.y, 785 {"unit": datainfo.sample.orientation_unit}) 786 written = written | self.write_node( \ 787 ori, "yaw", datainfo.sample.orientation.z, 788 {"unit": datainfo.sample.orientation_unit}) 789 if written == True: 790 self.append(ori, sample) 791 792 for item in datainfo.sample.details: 793 self.write_node(sample, "details", item) 794 795 def _write_instrument(self, datainfo, entry_node): 796 """ 797 Writes the instrumental information to the XML file 798 799 :param datainfo: The Data1D object the information is coming from 800 :param entry_node: lxml node ElementTree object to be appended to 801 """ 802 instr = self.create_element("SASinstrument") 803 self.append(instr, entry_node) 804 self.write_node(instr, "name", datainfo.instrument) 805 return instr 806 807 def _write_source(self, datainfo, instr): 808 """ 809 Writes the source information to the XML file 810 811 :param datainfo: The Data1D object the information is coming from 812 :param instr: instrument node to be appended to 813 """ 814 source = self.create_element("SASsource") 815 if datainfo.source.name is not None: 816 self.write_attribute(source, "name", 817 str(datainfo.source.name)) 818 self.append(source, instr) 819 if datainfo.source.radiation == None or datainfo.source.radiation == '': 820 datainfo.source.radiation = "neutron" 821 self.write_node(source, "radiation", datainfo.source.radiation) 822 823 size = self.create_element("beam_size") 824 if datainfo.source.beam_size_name is not None: 825 self.write_attribute(size, "name", 826 str(datainfo.source.beam_size_name)) 827 written = self.write_node( \ 828 size, "x", datainfo.source.beam_size.x, 829 {"unit": datainfo.source.beam_size_unit}) 830 written = written | self.write_node( \ 831 size, "y", datainfo.source.beam_size.y, 832 {"unit": datainfo.source.beam_size_unit}) 833 written = written | self.write_node( \ 834 size, "z", datainfo.source.beam_size.z, 835 {"unit": datainfo.source.beam_size_unit}) 836 if written == True: 837 self.append(size, source) 838 839 self.write_node(source, "beam_shape", datainfo.source.beam_shape) 840 self.write_node(source, "wavelength", 841 datainfo.source.wavelength, 842 {"unit": datainfo.source.wavelength_unit}) 843 self.write_node(source, "wavelength_min", 844 datainfo.source.wavelength_min, 845 {"unit": datainfo.source.wavelength_min_unit}) 846 self.write_node(source, "wavelength_max", 847 datainfo.source.wavelength_max, 848 {"unit": datainfo.source.wavelength_max_unit}) 849 self.write_node(source, "wavelength_spread", 850 datainfo.source.wavelength_spread, 851 {"unit": datainfo.source.wavelength_spread_unit}) 852 853 def _write_collimation(self, datainfo, instr): 854 """ 855 Writes the collimation information to the XML file 856 857 :param datainfo: The Data1D object the information is coming from 858 :param instr: lxml 
node ElementTree object to be appended to 859 """ 860 if datainfo.collimation == [] or datainfo.collimation == None: 861 coll = Collimation() 862 datainfo.collimation.append(coll) 863 for item in datainfo.collimation: 864 coll = self.create_element("SAScollimation") 865 if item.name is not None: 866 self.write_attribute(coll, "name", str(item.name)) 867 self.append(coll, instr) 868 869 self.write_node(coll, "length", item.length, 870 {"unit": item.length_unit}) 871 872 for aperture in item.aperture: 873 apert = self.create_element("aperture") 874 if aperture.name is not None: 875 self.write_attribute(apert, "name", str(aperture.name)) 876 if aperture.type is not None: 877 self.write_attribute(apert, "type", str(aperture.type)) 878 self.append(apert, coll) 879 880 size = self.create_element("size") 881 if aperture.size_name is not None: 882 self.write_attribute(size, "name", 883 str(aperture.size_name)) 884 written = self.write_node(size, "x", aperture.size.x, 885 {"unit": aperture.size_unit}) 886 written = written | self.write_node( \ 887 size, "y", aperture.size.y, 888 {"unit": aperture.size_unit}) 889 written = written | self.write_node( \ 890 size, "z", aperture.size.z, 891 {"unit": aperture.size_unit}) 892 if written == True: 893 self.append(size, apert) 894 895 self.write_node(apert, "distance", aperture.distance, 896 {"unit": aperture.distance_unit}) 897 898 def _write_detectors(self, datainfo, instr): 899 """ 900 Writes the detector information to the XML file 901 902 :param datainfo: The Data1D object the information is coming from 903 :param inst: lxml instrument node to be appended to 904 """ 905 if datainfo.detector == None or datainfo.detector == []: 906 det = Detector() 907 det.name = "" 908 datainfo.detector.append(det) 909 910 for item in datainfo.detector: 911 det = self.create_element("SASdetector") 912 written = self.write_node(det, "name", item.name) 913 written = written | self.write_node(det, "SDD", item.distance, 914 {"unit": item.distance_unit}) 915 if written == True: 916 self.append(det, instr) 917 918 off = self.create_element("offset") 919 written = self.write_node(off, "x", item.offset.x, 920 {"unit": item.offset_unit}) 921 written = written | self.write_node(off, "y", item.offset.y, 922 {"unit": item.offset_unit}) 923 written = written | self.write_node(off, "z", item.offset.z, 924 {"unit": item.offset_unit}) 925 if written == True: 926 self.append(off, det) 927 928 ori = self.create_element("orientation") 929 written = self.write_node(ori, "roll", item.orientation.x, 930 {"unit": item.orientation_unit}) 931 written = written | self.write_node(ori, "pitch", 932 item.orientation.y, 933 {"unit": item.orientation_unit}) 934 written = written | self.write_node(ori, "yaw", 935 item.orientation.z, 936 {"unit": item.orientation_unit}) 937 if written == True: 938 self.append(ori, det) 939 940 center = self.create_element("beam_center") 941 written = self.write_node(center, "x", item.beam_center.x, 942 {"unit": item.beam_center_unit}) 943 written = written | self.write_node(center, "y", 944 item.beam_center.y, 945 {"unit": item.beam_center_unit}) 946 written = written | self.write_node(center, "z", 947 item.beam_center.z, 948 {"unit": item.beam_center_unit}) 949 if written == True: 950 self.append(center, det) 951 952 pix = self.create_element("pixel_size") 953 written = self.write_node(pix, "x", item.pixel_size.x, 954 {"unit": item.pixel_size_unit}) 955 written = written | self.write_node(pix, "y", item.pixel_size.y, 956 {"unit": item.pixel_size_unit}) 957 written = 
written | self.write_node(pix, "z", item.pixel_size.z, 958 {"unit": item.pixel_size_unit}) 959 if written == True: 960 self.append(pix, det) 961 self.write_node(det, "slit_length", item.slit_length, 962 {"unit": item.slit_length_unit}) 963 964 965 def _write_process_notes(self, datainfo, entry_node): 966 """ 967 Writes the process notes to the XML file 968 969 :param datainfo: The Data1D object the information is coming from 970 :param entry_node: lxml node ElementTree object to be appended to 971 972 """ 973 for item in datainfo.process: 974 node = self.create_element("SASprocess") 975 self.append(node, entry_node) 976 self.write_node(node, "name", item.name) 977 self.write_node(node, "date", item.date) 978 self.write_node(node, "description", item.description) 979 for term in item.term: 980 value = term['value'] 981 del term['value'] 982 self.write_node(node, "term", value, term) 983 for note in item.notes: 984 self.write_node(node, "SASprocessnote", note) 985 if len(item.notes) == 0: 986 self.write_node(node, "SASprocessnote", "") 987 988 def _write_notes(self, datainfo, entry_node): 989 """ 990 Writes the notes to the XML file and creates an empty note if none 991 exist 992 993 :param datainfo: The Data1D object the information is coming from 994 :param entry_node: lxml node ElementTree object to be appended to 995 996 """ 997 if len(datainfo.notes) == 0: 998 node = self.create_element("SASnote") 999 self.append(node, entry_node) 1000 else: 1001 for item in datainfo.notes: 1002 node = self.create_element("SASnote") 1003 self.write_text(node, item) 1004 self.append(node, entry_node) 1005 1006 def _check_origin(self, entry_node, doc, frm): 1007 """ 1008 Return the document, and the SASentry node associated with 1009 the data we just wrote. 1010 If the calling function was not the cansas reader, return a minidom 1011 object rather than an lxml object. 
1012 1013 :param entry_node: lxml node ElementTree object to be appended to 1014 :param doc: entire xml tree 1015 """ 1016 if not frm: 1017 frm = inspect.stack()[1] 1018 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 1019 mod_name = mod_name.replace(".py", "") 1020 mod = mod_name.split("sas/") 1021 mod_name = mod[1] 1022 if mod_name != "sascalc/dataloader/readers/cansas_reader": 1023 string = self.to_string(doc, pretty_print=False) 1024 doc = parseString(string) 1025 node_name = entry_node.tag 1026 node_list = doc.getElementsByTagName(node_name) 1027 entry_node = node_list.item(0) 1028 return doc, entry_node 730 if self.current_dataset.dxl is not None: 731 dql_exists = True 732 if self.current_dataset.dxw is not None: 733 dqw_exists = True 734 if self.current_dataset.dx is not None: 735 dq_exists = True 736 if self.current_dataset.dy is not None: 737 di_exists = True 738 if dqw_exists and not dql_exists: 739 array_size = self.current_dataset.dxw.size - 1 740 self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size])) 741 elif dql_exists and not dqw_exists: 742 array_size = self.current_dataset.dxl.size - 1 743 self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size])) 744 elif not dql_exists and not dqw_exists and not dq_exists: 745 array_size = self.current_dataset.x.size - 1 746 self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size])) 747 if not di_exists: 748 array_size = self.current_dataset.y.size - 1 749 self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size])) 750 751 752 ####### All methods below are for writing CanSAS XML files ####### 753 754 755 def write(self, filename, datainfo): 756 """ 757 Write the content of a Data1D as a CanSAS XML file 758 759 :param filename: name of the file to write 760 :param datainfo: Data1D object 761 """ 762 # Create XML document 763 doc, _ = self._to_xml_doc(datainfo) 764 # Write the file 765 file_ref = open(filename, 'w') 766 if self.encoding == None: 767 self.encoding = "UTF-8" 768 doc.write(file_ref, encoding=self.encoding, 769 pretty_print=True, xml_declaration=True) 770 file_ref.close() 1029 771 1030 772 def _to_xml_doc(self, datainfo): … … 1095 837 return False 1096 838 1097 def write(self, filename, datainfo): 1098 """ 1099 Write the content of a Data1D as a CanSAS XML file 1100 1101 :param filename: name of the file to write 1102 :param datainfo: Data1D object 1103 """ 1104 # Create XML document 1105 doc, _ = self._to_xml_doc(datainfo) 1106 # Write the file 1107 file_ref = open(filename, 'w') 1108 if self.encoding == None: 1109 self.encoding = "UTF-8" 1110 doc.write(file_ref, encoding=self.encoding, 1111 pretty_print=True, xml_declaration=True) 1112 file_ref.close() 839 def _get_pi_string(self): 840 """ 841 Creates the processing instructions header for writing to file 842 """ 843 pis = self.return_processing_instructions() 844 if len(pis) > 0: 845 pi_tree = self.create_tree(pis[0]) 846 i = 1 847 for i in range(1, len(pis) - 1): 848 pi_tree = self.append(pis[i], pi_tree) 849 pi_string = self.to_string(pi_tree) 850 else: 851 pi_string = "" 852 return pi_string 853 854 def _create_main_node(self): 855 """ 856 Creates the primary xml header used when writing to file 857 """ 858 xsi = "http://www.w3.org/2001/XMLSchema-instance" 859 version = self.cansas_version 860 n_s = CANSAS_NS.get(version).get("ns") 861 if version == "1.1": 862 url = "http://www.cansas.org/formats/1.1/" 863 else: 864 url = 
"http://svn.smallangles.net/svn/canSAS/1dwg/trunk/" 865 schema_location = "{0} {1}cansas1d.xsd".format(n_s, url) 866 attrib = {"{" + xsi + "}schemaLocation" : schema_location, 867 "version" : version} 868 nsmap = {'xsi' : xsi, None: n_s} 869 870 main_node = self.create_element("{" + n_s + "}SASroot", 871 attrib=attrib, nsmap=nsmap) 872 return main_node 873 874 def _write_run_names(self, datainfo, entry_node): 875 """ 876 Writes the run names to the XML file 877 878 :param datainfo: The Data1D object the information is coming from 879 :param entry_node: lxml node ElementTree object to be appended to 880 """ 881 if datainfo.run == None or datainfo.run == []: 882 datainfo.run.append(RUN_NAME_DEFAULT) 883 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 884 for item in datainfo.run: 885 runname = {} 886 if item in datainfo.run_name and \ 887 len(str(datainfo.run_name[item])) > 1: 888 runname = {'name': datainfo.run_name[item]} 889 self.write_node(entry_node, "Run", item, runname) 890 891 def _write_data(self, datainfo, entry_node): 892 """ 893 Writes the I and Q data to the XML file 894 895 :param datainfo: The Data1D object the information is coming from 896 :param entry_node: lxml node ElementTree object to be appended to 897 """ 898 node = self.create_element("SASdata") 899 self.append(node, entry_node) 900 901 for i in range(len(datainfo.x)): 902 point = self.create_element("Idata") 903 node.append(point) 904 self.write_node(point, "Q", datainfo.x[i], 905 {'unit': datainfo.x_unit}) 906 if len(datainfo.y) >= i: 907 self.write_node(point, "I", datainfo.y[i], 908 {'unit': datainfo.y_unit}) 909 if datainfo.dy != None and len(datainfo.dy) > i: 910 self.write_node(point, "Idev", datainfo.dy[i], 911 {'unit': datainfo.y_unit}) 912 if datainfo.dx != None and len(datainfo.dx) > i: 913 self.write_node(point, "Qdev", datainfo.dx[i], 914 {'unit': datainfo.x_unit}) 915 if datainfo.dxw != None and len(datainfo.dxw) > i: 916 self.write_node(point, "dQw", datainfo.dxw[i], 917 {'unit': datainfo.x_unit}) 918 if datainfo.dxl != None and len(datainfo.dxl) > i: 919 self.write_node(point, "dQl", datainfo.dxl[i], 920 {'unit': datainfo.x_unit}) 921 922 def _write_trans_spectrum(self, datainfo, entry_node): 923 """ 924 Writes the transmission spectrum data to the XML file 925 926 :param datainfo: The Data1D object the information is coming from 927 :param entry_node: lxml node ElementTree object to be appended to 928 """ 929 for i in range(len(datainfo.trans_spectrum)): 930 spectrum = datainfo.trans_spectrum[i] 931 node = self.create_element("SAStransmission_spectrum", 932 {"name" : spectrum.name}) 933 self.append(node, entry_node) 934 if isinstance(spectrum.timestamp, datetime.datetime): 935 node.setAttribute("timestamp", spectrum.timestamp) 936 for i in range(len(spectrum.wavelength)): 937 point = self.create_element("Tdata") 938 node.append(point) 939 self.write_node(point, "Lambda", spectrum.wavelength[i], 940 {'unit': spectrum.wavelength_unit}) 941 self.write_node(point, "T", spectrum.transmission[i], 942 {'unit': spectrum.transmission_unit}) 943 if spectrum.transmission_deviation != None \ 944 and len(spectrum.transmission_deviation) >= i: 945 self.write_node(point, "Tdev", 946 spectrum.transmission_deviation[i], 947 {'unit': 948 spectrum.transmission_deviation_unit}) 949 950 def _write_sample_info(self, datainfo, entry_node): 951 """ 952 Writes the sample information to the XML file 953 954 :param datainfo: The Data1D object the information is coming from 955 :param entry_node: lxml node ElementTree 
object to be appended to 956 """ 957 sample = self.create_element("SASsample") 958 if datainfo.sample.name is not None: 959 self.write_attribute(sample, "name", 960 str(datainfo.sample.name)) 961 self.append(sample, entry_node) 962 self.write_node(sample, "ID", str(datainfo.sample.ID)) 963 self.write_node(sample, "thickness", datainfo.sample.thickness, 964 {"unit": datainfo.sample.thickness_unit}) 965 self.write_node(sample, "transmission", datainfo.sample.transmission) 966 self.write_node(sample, "temperature", datainfo.sample.temperature, 967 {"unit": datainfo.sample.temperature_unit}) 968 969 pos = self.create_element("position") 970 written = self.write_node(pos, 971 "x", 972 datainfo.sample.position.x, 973 {"unit": datainfo.sample.position_unit}) 974 written = written | self.write_node( \ 975 pos, "y", datainfo.sample.position.y, 976 {"unit": datainfo.sample.position_unit}) 977 written = written | self.write_node( \ 978 pos, "z", datainfo.sample.position.z, 979 {"unit": datainfo.sample.position_unit}) 980 if written == True: 981 self.append(pos, sample) 982 983 ori = self.create_element("orientation") 984 written = self.write_node(ori, "roll", 985 datainfo.sample.orientation.x, 986 {"unit": datainfo.sample.orientation_unit}) 987 written = written | self.write_node( \ 988 ori, "pitch", datainfo.sample.orientation.y, 989 {"unit": datainfo.sample.orientation_unit}) 990 written = written | self.write_node( \ 991 ori, "yaw", datainfo.sample.orientation.z, 992 {"unit": datainfo.sample.orientation_unit}) 993 if written == True: 994 self.append(ori, sample) 995 996 for item in datainfo.sample.details: 997 self.write_node(sample, "details", item) 998 999 def _write_instrument(self, datainfo, entry_node): 1000 """ 1001 Writes the instrumental information to the XML file 1002 1003 :param datainfo: The Data1D object the information is coming from 1004 :param entry_node: lxml node ElementTree object to be appended to 1005 """ 1006 instr = self.create_element("SASinstrument") 1007 self.append(instr, entry_node) 1008 self.write_node(instr, "name", datainfo.instrument) 1009 return instr 1010 1011 def _write_source(self, datainfo, instr): 1012 """ 1013 Writes the source information to the XML file 1014 1015 :param datainfo: The Data1D object the information is coming from 1016 :param instr: instrument node to be appended to 1017 """ 1018 source = self.create_element("SASsource") 1019 if datainfo.source.name is not None: 1020 self.write_attribute(source, "name", 1021 str(datainfo.source.name)) 1022 self.append(source, instr) 1023 if datainfo.source.radiation == None or datainfo.source.radiation == '': 1024 datainfo.source.radiation = "neutron" 1025 self.write_node(source, "radiation", datainfo.source.radiation) 1026 1027 size = self.create_element("beam_size") 1028 if datainfo.source.beam_size_name is not None: 1029 self.write_attribute(size, "name", 1030 str(datainfo.source.beam_size_name)) 1031 written = self.write_node( \ 1032 size, "x", datainfo.source.beam_size.x, 1033 {"unit": datainfo.source.beam_size_unit}) 1034 written = written | self.write_node( \ 1035 size, "y", datainfo.source.beam_size.y, 1036 {"unit": datainfo.source.beam_size_unit}) 1037 written = written | self.write_node( \ 1038 size, "z", datainfo.source.beam_size.z, 1039 {"unit": datainfo.source.beam_size_unit}) 1040 if written == True: 1041 self.append(size, source) 1042 1043 self.write_node(source, "beam_shape", datainfo.source.beam_shape) 1044 self.write_node(source, "wavelength", 1045 datainfo.source.wavelength, 1046 {"unit": 
datainfo.source.wavelength_unit}) 1047 self.write_node(source, "wavelength_min", 1048 datainfo.source.wavelength_min, 1049 {"unit": datainfo.source.wavelength_min_unit}) 1050 self.write_node(source, "wavelength_max", 1051 datainfo.source.wavelength_max, 1052 {"unit": datainfo.source.wavelength_max_unit}) 1053 self.write_node(source, "wavelength_spread", 1054 datainfo.source.wavelength_spread, 1055 {"unit": datainfo.source.wavelength_spread_unit}) 1056 1057 def _write_collimation(self, datainfo, instr): 1058 """ 1059 Writes the collimation information to the XML file 1060 1061 :param datainfo: The Data1D object the information is coming from 1062 :param instr: lxml node ElementTree object to be appended to 1063 """ 1064 if datainfo.collimation == [] or datainfo.collimation == None: 1065 coll = Collimation() 1066 datainfo.collimation.append(coll) 1067 for item in datainfo.collimation: 1068 coll = self.create_element("SAScollimation") 1069 if item.name is not None: 1070 self.write_attribute(coll, "name", str(item.name)) 1071 self.append(coll, instr) 1072 1073 self.write_node(coll, "length", item.length, 1074 {"unit": item.length_unit}) 1075 1076 for aperture in item.aperture: 1077 apert = self.create_element("aperture") 1078 if aperture.name is not None: 1079 self.write_attribute(apert, "name", str(aperture.name)) 1080 if aperture.type is not None: 1081 self.write_attribute(apert, "type", str(aperture.type)) 1082 self.append(apert, coll) 1083 1084 size = self.create_element("size") 1085 if aperture.size_name is not None: 1086 self.write_attribute(size, "name", 1087 str(aperture.size_name)) 1088 written = self.write_node(size, "x", aperture.size.x, 1089 {"unit": aperture.size_unit}) 1090 written = written | self.write_node( \ 1091 size, "y", aperture.size.y, 1092 {"unit": aperture.size_unit}) 1093 written = written | self.write_node( \ 1094 size, "z", aperture.size.z, 1095 {"unit": aperture.size_unit}) 1096 if written == True: 1097 self.append(size, apert) 1098 1099 self.write_node(apert, "distance", aperture.distance, 1100 {"unit": aperture.distance_unit}) 1101 1102 def _write_detectors(self, datainfo, instr): 1103 """ 1104 Writes the detector information to the XML file 1105 1106 :param datainfo: The Data1D object the information is coming from 1107 :param inst: lxml instrument node to be appended to 1108 """ 1109 if datainfo.detector == None or datainfo.detector == []: 1110 det = Detector() 1111 det.name = "" 1112 datainfo.detector.append(det) 1113 1114 for item in datainfo.detector: 1115 det = self.create_element("SASdetector") 1116 written = self.write_node(det, "name", item.name) 1117 written = written | self.write_node(det, "SDD", item.distance, 1118 {"unit": item.distance_unit}) 1119 if written == True: 1120 self.append(det, instr) 1121 1122 off = self.create_element("offset") 1123 written = self.write_node(off, "x", item.offset.x, 1124 {"unit": item.offset_unit}) 1125 written = written | self.write_node(off, "y", item.offset.y, 1126 {"unit": item.offset_unit}) 1127 written = written | self.write_node(off, "z", item.offset.z, 1128 {"unit": item.offset_unit}) 1129 if written == True: 1130 self.append(off, det) 1131 1132 ori = self.create_element("orientation") 1133 written = self.write_node(ori, "roll", item.orientation.x, 1134 {"unit": item.orientation_unit}) 1135 written = written | self.write_node(ori, "pitch", 1136 item.orientation.y, 1137 {"unit": item.orientation_unit}) 1138 written = written | self.write_node(ori, "yaw", 1139 item.orientation.z, 1140 {"unit": 
item.orientation_unit}) 1141 if written == True: 1142 self.append(ori, det) 1143 1144 center = self.create_element("beam_center") 1145 written = self.write_node(center, "x", item.beam_center.x, 1146 {"unit": item.beam_center_unit}) 1147 written = written | self.write_node(center, "y", 1148 item.beam_center.y, 1149 {"unit": item.beam_center_unit}) 1150 written = written | self.write_node(center, "z", 1151 item.beam_center.z, 1152 {"unit": item.beam_center_unit}) 1153 if written == True: 1154 self.append(center, det) 1155 1156 pix = self.create_element("pixel_size") 1157 written = self.write_node(pix, "x", item.pixel_size.x, 1158 {"unit": item.pixel_size_unit}) 1159 written = written | self.write_node(pix, "y", item.pixel_size.y, 1160 {"unit": item.pixel_size_unit}) 1161 written = written | self.write_node(pix, "z", item.pixel_size.z, 1162 {"unit": item.pixel_size_unit}) 1163 if written == True: 1164 self.append(pix, det) 1165 self.write_node(det, "slit_length", item.slit_length, 1166 {"unit": item.slit_length_unit}) 1167 1168 1169 def _write_process_notes(self, datainfo, entry_node): 1170 """ 1171 Writes the process notes to the XML file 1172 1173 :param datainfo: The Data1D object the information is coming from 1174 :param entry_node: lxml node ElementTree object to be appended to 1175 1176 """ 1177 for item in datainfo.process: 1178 node = self.create_element("SASprocess") 1179 self.append(node, entry_node) 1180 self.write_node(node, "name", item.name) 1181 self.write_node(node, "date", item.date) 1182 self.write_node(node, "description", item.description) 1183 for term in item.term: 1184 if isinstance(term, dict): 1185 value = term['value'] 1186 del term['value'] 1187 else: 1188 value = term 1189 self.write_node(node, "term", value, term) 1190 for note in item.notes: 1191 self.write_node(node, "SASprocessnote", note) 1192 if len(item.notes) == 0: 1193 self.write_node(node, "SASprocessnote", "") 1194 1195 def _write_notes(self, datainfo, entry_node): 1196 """ 1197 Writes the notes to the XML file and creates an empty note if none 1198 exist 1199 1200 :param datainfo: The Data1D object the information is coming from 1201 :param entry_node: lxml node ElementTree object to be appended to 1202 1203 """ 1204 if len(datainfo.notes) == 0: 1205 node = self.create_element("SASnote") 1206 self.append(node, entry_node) 1207 else: 1208 for item in datainfo.notes: 1209 node = self.create_element("SASnote") 1210 self.write_text(node, item) 1211 self.append(node, entry_node) 1212 1213 def _check_origin(self, entry_node, doc, frm): 1214 """ 1215 Return the document, and the SASentry node associated with 1216 the data we just wrote. 1217 If the calling function was not the cansas reader, return a minidom 1218 object rather than an lxml object. 1219 1220 :param entry_node: lxml node ElementTree object to be appended to 1221 :param doc: entire xml tree 1222 """ 1223 if not frm: 1224 frm = inspect.stack()[1] 1225 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 1226 mod_name = mod_name.replace(".py", "") 1227 mod = mod_name.split("sas/") 1228 mod_name = mod[1] 1229 if mod_name != "sascalc/dataloader/readers/cansas_reader": 1230 string = self.to_string(doc, pretty_print=False) 1231 doc = parseString(string) 1232 node_name = entry_node.tag 1233 node_list = doc.getElementsByTagName(node_name) 1234 entry_node = node_list.item(0) 1235 return doc, entry_node 1113 1236 1114 1237 # DO NOT REMOVE - used in saving and loading panel states.
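The term loop in _write_process_notes above relies on a convention worth spelling out: a process term is either a plain string or a dictionary whose 'value' key holds the element text, with every remaining key written out as an XML attribute. A minimal sketch of that unpacking (the radius term is a made-up example, not data from this changeset):

    # Hypothetical term: 'value' is the element text; the leftover keys
    # ('name', 'unit') become attributes of the resulting element,
    # i.e. <term name="radius" unit="A">20.0</term>
    term = {"name": "radius", "unit": "A", "value": "20.0"}
    value = term.pop("value")  # strip the text out of the attribute dict
    print(value, term)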
… … 1195 1318 if entry is not None and entry.text is not None: 1196 1319 exec "storage.%s = entry.text.strip()" % variable 1320 1321 1322 # DO NOT REMOVE Called by outside packages: 1323 # sas.sasgui.perspectives.invariant.invariant_state 1324 # sas.sasgui.perspectives.fitting.pagestate 1325 def get_content(location, node): 1326 """ 1327 Get the first instance of the content of an xpath location. 1328 1329 :param location: xpath location 1330 :param node: node to start at 1331 1332 :return: Element, or None 1333 """ 1334 nodes = node.xpath(location, 1335 namespaces={'ns': CANSAS_NS.get("1.0").get("ns")}) 1336 if len(nodes) > 0: 1337 return nodes[0] 1338 else: 1339 return None 1340 1341 # DO NOT REMOVE Called by outside packages: 1342 # sas.sasgui.perspectives.fitting.pagestate 1343 def write_node(doc, parent, name, value, attr=None): 1344 """ 1345 :param doc: document DOM 1346 :param parent: parent node 1347 :param name: tag of the element 1348 :param value: value of the child text node 1349 :param attr: attribute dictionary 1350 1351 :return: True if something was appended, otherwise False 1352 """ 1353 if attr is None: 1354 attr = {} 1355 if value is not None: 1356 node = doc.createElement(name) 1357 node.appendChild(doc.createTextNode(str(value))) 1358 for item in attr: 1359 node.setAttribute(item, attr[item]) 1360 parent.appendChild(node) 1361 return True 1362 return False
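Because write_node returns True only when it actually appended an element, callers can OR successive results together and attach the parent element only if at least one child was written; that is the pattern used throughout the _write_* methods above. A small, self-contained usage sketch with xml.dom.minidom, assuming the write_node defined above is in scope:

    from xml.dom.minidom import Document

    doc = Document()
    parent = doc.createElement("SASsample")
    doc.appendChild(parent)
    # A real value appends <thickness unit="mm">1.0</thickness> -> True;
    # a None value appends nothing -> False.
    written = write_node(doc, parent, "thickness", 1.0, {"unit": "mm"})
    written = written | write_node(doc, parent, "temperature", None)
    if written:
        print(doc.toprettyxml(indent="  "))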
- src/sas/sasgui/guiframe/media/graph_help.rst
r318427a9 re68c9bf 20 20 ^^^^^^^^^^^^^^^^^^^^^^^ 21 21 22 To invoke the *Graph Menu* simply right-click on a data/theory plot, or click 23 the *Graph Menu* (bullet list) icon in the toolbar at the bottom of the plot. 22 22 To invoke the *Graph Menu* simply right-click on a data/theory plot, or click 23 23 the *Graph Menu* (bullet list) icon in the toolbar at the bottom of the plot. 24 24 Then select a menu item. 25 25 … … 27 27 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 28 28 29 To expand a plot window, click the *Maximise* (square) icon in the top-right 29 29 To expand a plot window, click the *Maximise* (square) icon in the top-right 30 30 corner. 31 31 32 To shrink a plot window, click the *Restore down* (square-on-square) icon in 32 32 To shrink a plot window, click the *Restore down* (square-on-square) icon in 33 33 the top-right corner. 34 34 35 To hide a plot, click the *Minimise* (-) icon in the top-right corner of the 35 35 To hide a plot, click the *Minimise* (-) icon in the top-right corner of the 36 36 plot window. 37 37 38 To show a hidden plot, select the *Restore up* (square-on-square) icon on the 38 38 To show a hidden plot, select the *Restore up* (square-on-square) icon on the 39 39 minimised window. 40 40 41 To delete a plot, click the *Close* (x) icon in the top-right corner of the 41 41 To delete a plot, click the *Close* (x) icon in the top-right corner of the 42 42 plot window. 43 43 44 *NOTE! If a residuals graph (when fitting data) is hidden, it will not show up 44 44 *NOTE! If a residuals graph (when fitting data) is hidden, it will not show up 45 45 after computation.* 46 46 … … 48 48 ^^^^^^^^^^^^^^^ 49 49 50 Select the *Pan* (crossed arrows) icon in the toolbar at the bottom of the plot 51 to activate this option. Move the mouse pointer to the plot. It will change to 52 a hand. Then left-click and drag the plot around. The axis values will adjust 50 50 Select the *Pan* (crossed arrows) icon in the toolbar at the bottom of the plot 51 51 to activate this option. Move the mouse pointer to the plot. It will change to 52 52 a hand. Then left-click and drag the plot around. The axis values will adjust 53 53 accordingly. 54 54 55 55 To disable dragging mode, unselect the *crossed arrows* icon on the toolbar. 56 56 … … 58 58 ^^^^^^^^^^^^^^^^^^^^^^^^ 59 59 60 Select the *Zoom* (magnifying glass) button in the toolbar at the bottom of 61 the plot to activate this option. Move the mouse pointer to the plot. It will 62 change to a cross-hair. Then left-click and drag the pointer around to generate 60 60 Select the *Zoom* (magnifying glass) button in the toolbar at the bottom of 61 61 the plot to activate this option. Move the mouse pointer to the plot. It will 62 62 change to a cross-hair. Then left-click and drag the pointer around to generate 63 63 a region of interest. Release the mouse button to generate the new view. 64 64 65 65 To disable zoom mode, unselect the *Zoom* button on the toolbar. 66 66 67 After zooming in on a region, the *left arrow* or *right arrow* buttons on 67 67 After zooming in on a region, the *left arrow* or *right arrow* buttons on 68 68 the toolbar will switch between recent views. 69 69 70 *NOTE! If a wheel mouse is available scrolling the wheel will zoom in/out 71 on the current plot (changing both axes). 70 70 *NOTE! If a wheel mouse is available scrolling the wheel will zoom in/out 71 71 on the current plot (changing both axes).
Alternatively, point at the numbers 72 72 on one axis and scroll the wheel to zoom in/out on just that axis.* 73 73 74 To return to the original view of the data, click the *Reset* (home) icon 74 74 To return to the original view of the data, click the *Reset* (home) icon 75 75 in the toolbar at the bottom of the plot (see Resetting_the_graph_ for further details). 76 76 … … 78 78 ^^^^^^^^^^^^^^^^^^^ 79 79 80 To save the current plot as an image file, right-click on the plot to bring up 80 80 To save the current plot as an image file, right-click on the plot to bring up 81 81 the *Graph Menu* (see Invoking_the_graph_menu_) and select *Save Image*. 82 Alternatively, click on the *Save* (floppy disk) icon in the toolbar at the 82 82 Alternatively, click on the *Save* (floppy disk) icon in the toolbar at the 83 83 bottom of the plot. 84 85 A dialog window will open. Select a folder, enter a filename, choose an output 84 84 85 85 A dialog window will open. Select a folder, enter a filename, choose an output 86 86 image type, and click *Save*. 87 87 … … 98 98 * TIF/TIFF (tagged image file) 99 99 100 The PGF image type option requires a LaTeX compiler: xelatex (default), 101 lualatex, or pdflatex. These are not shipped with SasView. 102 103 100 Printing a plot 104 101 ^^^^^^^^^^^^^^^ 105 102 106 To send the current plot to a printer, click on the *Print* (printer) icon in 103 103 To send the current plot to a printer, click on the *Print* (printer) icon in 107 104 the toolbar at the bottom of the plot. 108 105 … … 112 109 ^^^^^^^^^^^^^^^^^^^ 113 110 114 To reset the axis range of a graph to its initial values select *Reset Graph 111 111 To reset the axis range of a graph to its initial values select *Reset Graph 115 112 Range* on the *Graph Menu* (see Invoking_the_graph_menu_). Alternatively, use 116 113 the *Reset* (home) icon in the toolbar at the bottom of the plot. … … 136 133 137 134 From the *Graph Menu* (see Invoking_the_graph_menu_) select *Change Scale*. A 138 dialog window will appear in which it is possible to choose different 135 135 dialog window will appear in which it is possible to choose different 139 136 transformations of the x (usually Q) or y (usually I(Q)) axes, including: 140 137 … … 142 139 * y, 1/y, ln(y), y^2, y.(x^4), 1/sqrt(y), 143 140 * log10(y), ln(y.x), ln(y.x^2), ln(y.x^4), log10(y.x^4) 144 141 145 142 A *View* option includes short-cuts to common SAS transformations, such as: 146 143 … … 151 148 * Kratky 152 149 153 For properly corrected and scaled data, these SAS transformations can be used 154 to estimate, for example, Rg, rod diameter, or SANS incoherent background 150 150 For properly corrected and scaled data, these SAS transformations can be used 151 151 to estimate, for example, Rg, rod diameter, or SANS incoherent background 155 152 levels, via a linear fit (see Making_a_linear_fit_). 156 153 … … 161 158 162 159 From the *Graph Menu* (see Invoking_the_graph_menu_) select *Toggle Linear/Log 163 160 Scale* to switch between a linear and a log intensity scale. The type of scale 164 161 selected is written alongside the colour scale. 165 162 … … 170 167 171 168 From the *Graph Menu* (see Invoking_the_graph_menu_) select *2D Color Map* to 172 169 choose a different color scale for the image and/or change the maximum or 173 170 minimum limits of the scale.
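The Guinier short-cut under *Changing the scale* works because, at low Q, I(Q) = I(0).exp(-Q^2.Rg^2/3), so ln(I) plotted against Q^2 is a straight line of slope -Rg^2/3. A minimal numpy sketch of the kind of linear fit this enables (synthetic data, purely illustrative; not SasView code):

    import numpy as np

    # Synthetic low-Q Guinier data: I(Q) = I0 * exp(-(Q*Rg)**2 / 3)
    rg_true, i0 = 30.0, 100.0             # Rg in Angstrom
    q = np.linspace(0.005, 0.03, 50)      # keeps Q*Rg below ~1.3
    intensity = i0 * np.exp(-(q * rg_true) ** 2 / 3.0)

    # Fit ln(I) vs Q^2: slope = -Rg^2/3, intercept = ln(I0)
    slope, intercept = np.polyfit(q ** 2, np.log(intensity), 1)
    rg_fit = np.sqrt(-3.0 * slope)
    print(rg_fit)                         # recovers ~30.0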
174 171 … … 176 173 ^^^^^^^^^^^^^^^^^^^^^^^^ 177 174 178 Clicking anywhere in the plot window will cause the current coordinates to be 175 Clicking anywhere in the plot window will cause the current coordinates to be 179 176 displayed in the status bar at the very bottom-left of the SasView window. 180 177 181 178 .. ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ 182 179 … … 196 193 197 194 In the *Dataset Menu* (see Invoking_the_dataset_menu_), highlight a data set 198 and select *DataInfo* to bring up a data information dialog panel for that 195 and select *DataInfo* to bring up a data information dialog panel for that 199 196 data set. 200 197 … … 203 200 204 201 In the *Dataset Menu* (see Invoking_the_dataset_menu_), select *Save Points as 205 a File* (if 1D data) or *Save as a file(DAT)* (if 2D data). A save dialog will 202 a File* (if 1D data) or *Save as a file(DAT)* (if 2D data). A save dialog will 206 203 appear. 207 204 208 1D data can be saved in either ASCII text (.TXT) or CanSAS/SASXML (.XML) 205 1D data can be saved in either ASCII text (.TXT) or CanSAS/SASXML (.XML) 209 206 formats (see :ref:`Formats`). 210 207 … … 219 216 220 217 In the *Dataset Menu* (see Invoking_the_dataset_menu_), select *Linear Fit*. A 221 fitting dialog will appear. Set some initial parameters and data limits and 222 click *Fit*. The fitted parameter values are displayed and the resulting line 223 calculated from them is added to the plot. 218 fitting dialog will appear. Set some initial parameters and data limits and 219 click *Fit*. The fitted parameter values are displayed and the resulting line 220 calculated from them is added to the plot. 224 221 225 222 This option is most useful for performing simple Guinier, XS Guinier, and 226 Porod type analyses, for example, to estimate Rg, a rod diameter, or incoherent 223 Porod type analyses, for example, to estimate Rg, a rod diameter, or incoherent 227 224 background level, respectively. 228 225 … … 243 240 244 241 In the *Dataset Menu* (see Invoking_the_dataset_menu_), select *Show Error Bar* 245 or *Hide Error Bar* to switch between showing/hiding the errors associated 246 with the chosen dataset. 242 or *Hide Error Bar* to switch between showing/hiding the errors associated 243 with the chosen dataset. 247 244 248 245 Modify plot properties … … 250 247 251 248 In the *Dataset Menu* (see Invoking_the_dataset_menu_), select *Modify Plot 252 Property* to change the size, color, or shape of the displayed marker for the 249 Property* to change the size, color, or shape of the displayed marker for the 253 250 chosen dataset, or to change the dataset label that appears on the plot. 254 251 … … 263 260 This feature is only available with 2D data. 264 261 265 2D data averaging allows you to perform different types of averages on your 266 data. The region to be averaged is displayed in the plot window and its limits 262 2D data averaging allows you to perform different types of averages on your 263 data. The region to be averaged is displayed in the plot window and its limits 267 264 can be modified by dragging the boundaries around. 268 265 … … 280 277 * Box averaging on Qy 281 278 282 A 'slicer' will appear (except for *Perform Circular Average*) in the plot that 283 you can drag by clicking on a slicer's handle. When the handle is highlighted 279 A 'slicer' will appear (except for *Perform Circular Average*) in the plot that 280 you can drag by clicking on a slicer's handle. 
When the handle is highlighted 284 281 in red, it means that the slicer can move/change size. 285 282 286 283 *NOTE! The slicer size will reset if you try to select a region greater than 287 284 the size of the data.* 288 285 289 286 Alternatively, once a 'slicer' is active you can also select the region to 290 287 average by bringing back the *Dataset Menu* and selecting *Edit Slicer 291 288 Parameters*. A dialog window will appear in which you can enter values to 292 289 define a region or select the number of points to plot (*nbins*). 293 290 294 291 A separate plot window will also have appeared, displaying the requested 295 292 average. 296 293 297 294 *NOTE! The displayed average only updates when input focus is moved back to 298 295 that window; i.e., when the mouse pointer is moved onto that plot.* 299 296 300 297 Selecting *Box Sum* automatically brings up the 'Slicer Parameters' dialog in 301 298 order to display the average numerically, rather than graphically. 302 299 … … 306 303 ^^^^^^^^^^^^^^^^^^^^^^^^^ 307 304 308 305 This operation will perform an average in constant Q-rings around the (x,y) 309 306 pixel location of the beam center. 310 307 311 308 … … 312 309 ^^^^^^^^^^^^^^^^^^^^^^^ 313 310 314 311 This operation is the same as 'Unmasked Circular Average' except that any 315 312 masked region is excluded. 316 313 … … 320 317 This operation averages in constant Q-arcs. 321 318 322 319 The width of the sector is specified in degrees (+/- |delta|\|phi|\) each side 323 320 of the central angle (|phi|\). 324 321 … … 326 323 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 327 324 328 325 This operation performs an average between two Q-values centered on (0,0), 329 326 and averaged over a specified number of pixels. 330 327 331 328 The data is returned as a function of angle (|phi|\) in degrees with zero 332 329 degrees at the 3 O'clock position. 333 330 … … 337 334 This operation performs a sum of counts in a 2D region of interest. 338 335 339 336 When editing the slicer parameters, the user can enter the length and the width 340 337 of the rectangular slicer and the coordinates of the center of the rectangle. 341 338 … … 345 342 This operation computes an average I(Qx) for the region of interest. 346 343 347 344 When editing the slicer parameters, the user can control the length and the 348 345 width of the rectangular slicer. The averaged output is calculated from constant 349 346 bins with rectangular shape.
The resultant Q values are nominal values, that 344 When editing the slicer parameters, the user can control the length and the 345 width of the rectangular slicer. The averaged output is calculated from constant 346 bins with rectangular shape. The resultant Q values are nominal values, that 350 347 is, the central value of each bin on the x-axis. 351 348 … … 355 352 This operation computes an average I(Qy) for the region of interest. 356 353 357 354 When editing the slicer parameters, the user can control the length and the 358 355 width of the rectangular slicer. The averaged output is calculated from constant 359 356 bins with rectangular shape. The resultant Q values are nominal values, that 360 357 is, the central value of each bin on the x-axis. 361 358
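Conceptually, the box averages described above are a masked, binned mean of the 2D intensity: keep the pixels inside the rectangle, bin them by Qx (or Qy), and report each bin's mean intensity at the bin's central, nominal Q value. A rough numpy sketch under those assumptions (the function and array names here are hypothetical, not SasView's API):

    import numpy as np

    def box_average_qx(qx, qy, intensity, half_width, half_height, nbins=20):
        """Mean I(Qx) over a rectangle centred on the origin."""
        inside = (np.abs(qx) <= half_width) & (np.abs(qy) <= half_height)
        edges = np.linspace(-half_width, half_width, nbins + 1)
        counts, _ = np.histogram(qx[inside], bins=edges)
        sums, _ = np.histogram(qx[inside], bins=edges, weights=intensity[inside])
        centers = 0.5 * (edges[:-1] + edges[1:])  # nominal central Q per bin
        with np.errstate(invalid='ignore'):
            return centers, sums / counts         # empty bins come back NaN

    qx, qy = np.meshgrid(np.linspace(-0.1, 0.1, 64), np.linspace(-0.1, 0.1, 64))
    data = np.exp(-(qx ** 2 + qy ** 2) / 0.002)
    q_vals, i_of_qx = box_average_qx(qx.ravel(), qy.ravel(), data.ravel(), 0.05, 0.02)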
- src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rd72567e r3d6ab79

          :return: List of Data1D/2D objects and/or a list of errors.
          """
 -
          ## Reinitialize the class when loading a new data file to reset all class variables
          self.reset_class_variables()
 …
              ## If this is a dataset, store the data appropriately
              data_set = data[key][:]
 +            unit = self._get_unit(value)
 +
 +            ## I and Q Data
 +            if key == u'I':
 +                if type(self.current_dataset) is plottable_2D:
 +                    self.current_dataset.data = data_set.flatten()
 +                    self.current_dataset.zaxis("Intensity", unit)
 +                else:
 +                    self.current_dataset.y = data_set.flatten()
 +                    self.current_dataset.yaxis("Intensity", unit)
 +                continue
 +            elif key == u'Idev':
 +                if type(self.current_dataset) is plottable_2D:
 +                    self.current_dataset.err_data = data_set.flatten()
 +                else:
 +                    self.current_dataset.dy = data_set.flatten()
 +                continue
 +            elif key == u'Q':
 +                self.current_dataset.xaxis("Q", unit)
 +                if type(self.current_dataset) is plottable_2D:
 +                    self.current_dataset.q = data_set.flatten()
 +                else:
 +                    self.current_dataset.x = data_set.flatten()
 +                continue
 +            elif key == u'Qy':
 +                self.current_dataset.yaxis("Q_y", unit)
 +                self.current_dataset.qy_data = data_set.flatten()
 +                continue
 +            elif key == u'Qydev':
 +                self.current_dataset.dqy_data = data_set.flatten()
 +                continue
 +            elif key == u'Qx':
 +                self.current_dataset.xaxis("Q_x", unit)
 +                self.current_dataset.qx_data = data_set.flatten()
 +                continue
 +            elif key == u'Qxdev':
 +                self.current_dataset.dqx_data = data_set.flatten()
 +                continue
 +            elif key == u'Mask':
 +                self.current_dataset.mask = data_set.flatten()
 +                continue

              for data_point in data_set:
                  ## Top Level Meta Data
 -                unit = self._get_unit(value)
                  if key == u'definition':
                      self.current_datainfo.meta_data['reader'] = data_point
 …
                      self.current_datainfo.notes.append(data_point)

 -                ## I and Q Data
 -                elif key == u'I':
 -                    if type(self.current_dataset) is plottable_2D:
 -                        self.current_dataset.data = np.append(self.current_dataset.data, data_point)
 -                        self.current_dataset.zaxis("Intensity", unit)
 -                    else:
 -                        self.current_dataset.y = np.append(self.current_dataset.y, data_point)
 -                        self.current_dataset.yaxis("Intensity", unit)
 -                elif key == u'Idev':
 -                    if type(self.current_dataset) is plottable_2D:
 -                        self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)
 -                    else:
 -                        self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
 -                elif key == u'Q':
 -                    self.current_dataset.xaxis("Q", unit)
 -                    if type(self.current_dataset) is plottable_2D:
 -                        self.current_dataset.q = np.append(self.current_dataset.q, data_point)
 -                    else:
 -                        self.current_dataset.x = np.append(self.current_dataset.x, data_point)
 -                elif key == u'Qy':
 -                    self.current_dataset.yaxis("Q_y", unit)
 -                    self.current_dataset.qy_data = np.append(self.current_dataset.qy_data, data_point)
 -                elif key == u'Qydev':
 -                    self.current_dataset.dqy_data = np.append(self.current_dataset.dqy_data, data_point)
 -                elif key == u'Qx':
 -                    self.current_dataset.xaxis("Q_x", unit)
 -                    self.current_dataset.qx_data = np.append(self.current_dataset.qx_data, data_point)
 -                elif key == u'Qxdev':
 -                    self.current_dataset.dqx_data = np.append(self.current_dataset.dqx_data, data_point)
 -                elif key == u'Mask':
 -                    self.current_dataset.mask = np.append(self.current_dataset.mask, data_point)
 -
                  ## Sample Information
 -                elif key == u'Title' and self.parent_class == u'SASsample':
 +                elif key == u'Title' and self.parent_class == u'SASsample':  # CanSAS 2.0 format
 +                    self.current_datainfo.sample.name = data_point
 +                elif key == u'name' and self.parent_class == u'SASsample':  # NXcanSAS format
                      self.current_datainfo.sample.name = data_point
                  elif key == u'thickness' and self.parent_class == u'SASsample':
 …
                  elif key == u'name' and self.parent_class == u'SASprocess':
                      self.process.name = data_point
 -                elif key == u'Title' and self.parent_class == u'SASprocess':
 +                elif key == u'Title' and self.parent_class == u'SASprocess':  # CanSAS 2.0 format
 +                    self.process.name = data_point
 +                elif key == u'name' and self.parent_class == u'SASprocess':  # NXcanSAS format
                      self.process.name = data_point
                  elif key == u'description' and self.parent_class == u'SASprocess':
 …
          ## Type cast data arrays to float64 and find min/max as appropriate
          for dataset in self.data2d:
 -            dataset.data = np.delete(dataset.data, [0])
              dataset.data = dataset.data.astype(np.float64)
 -            dataset.err_data = np.delete(dataset.err_data, [0])
              dataset.err_data = dataset.err_data.astype(np.float64)
 -            dataset.mask = np.delete(dataset.mask, [0])
              if dataset.qx_data is not None:
 -                dataset.qx_data = np.delete(dataset.qx_data, [0])
                  dataset.xmin = np.min(dataset.qx_data)
                  dataset.xmax = np.max(dataset.qx_data)
                  dataset.qx_data = dataset.qx_data.astype(np.float64)
              if dataset.dqx_data is not None:
 -                dataset.dqx_data = np.delete(dataset.dqx_data, [0])
                  dataset.dqx_data = dataset.dqx_data.astype(np.float64)
              if dataset.qy_data is not None:
 -                dataset.qy_data = np.delete(dataset.qy_data, [0])
                  dataset.ymin = np.min(dataset.qy_data)
                  dataset.ymax = np.max(dataset.qy_data)
                  dataset.qy_data = dataset.qy_data.astype(np.float64)
              if dataset.dqy_data is not None:
 -                dataset.dqy_data = np.delete(dataset.dqy_data, [0])
                  dataset.dqy_data = dataset.dqy_data.astype(np.float64)
              if dataset.q_data is not None:
 -                dataset.q_data = np.delete(dataset.q_data, [0])
                  dataset.q_data = dataset.q_data.astype(np.float64)
              zeros = np.ones(dataset.data.size, dtype=bool)
 …
          for dataset in self.data1d:
              if dataset.x is not None:
 -                dataset.x = np.delete(dataset.x, [0])
                  dataset.x = dataset.x.astype(np.float64)
                  dataset.xmin = np.min(dataset.x)
                  dataset.xmax = np.max(dataset.x)
              if dataset.y is not None:
 -                dataset.y = np.delete(dataset.y, [0])
                  dataset.y = dataset.y.astype(np.float64)
                  dataset.ymin = np.min(dataset.y)
                  dataset.ymax = np.max(dataset.y)
              if dataset.dx is not None:
 -                dataset.dx = np.delete(dataset.dx, [0])
                  dataset.dx = dataset.dx.astype(np.float64)
              if dataset.dxl is not None:
 -                dataset.dxl = np.delete(dataset.dxl, [0])
                  dataset.dxl = dataset.dxl.astype(np.float64)
              if dataset.dxw is not None:
 -                dataset.dxw = np.delete(dataset.dxw, [0])
                  dataset.dxw = dataset.dxw.astype(np.float64)
              if dataset.dy is not None:
 -                dataset.dy = np.delete(dataset.dy, [0])
                  dataset.dy = dataset.dy.astype(np.float64)
              final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
 …
          self.data2d = []
          self.current_datainfo = DataInfo()
 +

      def _initialize_new_data_set(self, parent_list = None):
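The substantive change in this reader is replacing the per-point np.append
loop with whole-dataset reads: h5py already returns the dataset as a numpy
array, so it can be flattened in one step. That in turn removes the need to
seed each array with a placeholder element and np.delete it afterwards, which
is why all the np.delete(..., [0]) calls disappear above. A minimal sketch of
the two access patterns, assuming h5py is installed and using a hypothetical
file name and HDF5 path::

    import numpy as np
    import h5py

    I_PATH = "sasentry01/sasdata01/I"   # hypothetical entry/dataset names

    def read_slow(h5file):
        """Old pattern: grow the array one point at a time with np.append."""
        y = np.empty(0)
        for point in h5file[I_PATH][:]:
            y = np.append(y, point)     # reallocates and copies every call
        return y

    def read_fast(h5file):
        """New pattern: one HDF5 read, then flatten -- no per-point copies."""
        return h5file[I_PATH][:].flatten()

    with h5py.File("example.h5", "r") as f:   # hypothetical file name
        intensity = read_fast(f)

Because np.append copies the whole array on every call, the old pattern is
quadratic in the number of points, while the new one is a single read.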
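The added # CanSAS 2.0 format / # NXcanSAS format branches accept either
u'Title' or u'name' for the same logical field. A hypothetical helper showing
the same tolerant lookup on an h5py group (the helper name and key order are
illustrative, not SasView API)::

    def first_of(group, keys):
        """Return the value of the first key present in the group, else None."""
        for key in keys:
            if key in group:
                return group[key][()]   # read the scalar/whole dataset
        return None

    # Sample name may be stored as 'Title' (CanSAS 2.0) or 'name' (NXcanSAS):
    # sample_name = first_of(sample_group, (u'Title', u'name'))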