Changeset 4d465e3 in sasview for src/sas/sascalc
- Timestamp: Apr 9, 2017 4:03:36 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: a79ab31
- Parents: aa45dcd (diff), ec65dc81 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location: src/sas/sascalc
- Files: 3 edited
src/sas/sascalc/realspace/VolumeCanvas.py
--- src/sas/sascalc/realspace/VolumeCanvas.py (revision d85c194)
+++ src/sas/sascalc/realspace/VolumeCanvas.py (revision 959eb01)
@@ -303,5 +303,5 @@
         self.shapecount += 1
 
-        # model changed, need to recalculate P(r)
+        # model changed, need to recalculate P(r)
         self._model_changed()
 
@@ -328,5 +328,5 @@
         id = "shape"+str(self.shapecount)
 
-        # shapeDesc = ShapeDescriptor(shape.lower())
+        # shapeDesc = ShapeDescriptor(shape.lower())
         if shape.lower() in shape_dict:
             shapeDesc = shape_dict[shape.lower()]()
@@ -335,5 +335,5 @@
             shapeDesc = PDBDescriptor(shape)
         else:
-            raise ValueError, "VolumeCanvas.add: Unknown shape %s" % shape
+            raise ValueError("VolumeCanvas.add: Unknown shape %s" % shape)
 
         return self.addObject(shapeDesc, id)
@@ -345,10 +345,10 @@
         """
 
-        if self.shapes.has_key(id):
+        if id in self.shapes:
             del self.shapes[id]
         else:
-            raise KeyError, "VolumeCanvas.delete: could not find shape ID"
-
-        # model changed, need to recalculate P(r)
+            raise KeyError("VolumeCanvas.delete: could not find shape ID")
+
+        # model changed, need to recalculate P(r)
         self._model_changed()
 
@@ -379,6 +379,6 @@
         # If a shape identifier was given, look the shape up
         # in the dictionary
-        if len(toks) >1:
-            if toks[0] in self.shapes.keys():
+        if len(toks):
+            if toks[0] in self.shapes:
                 # The shape was found, now look for the parameter
                 if toks[1] in self.shapes[toks[0]].params:
@@ -387,7 +387,7 @@
                     self._model_changed()
                 else:
-                    raise ValueError, "Could not find parameter %s" % name
+                    raise ValueError("Could not find parameter %s" % name)
             else:
-                raise ValueError, "Could not find shape %s" % toks[0]
+                raise ValueError("Could not find shape %s" % toks[0])
 
         else:
@@ -410,13 +410,11 @@
         if len(toks) == 1:
             try:
-                self.params.has_key(toks[0])
+                value = self.params[toks[0]]
             except KeyError:
-                raise ValueError, \
-                    "VolumeCanvas.getParam: Could not find %s" % name
-
-            value = self.params[toks[0]]
+                raise ValueError("VolumeCanvas.getParam: Could not find"
+                                 " %s" % name)
             if isinstance(value, ShapeDescriptor):
-                raise ValueError, \
-                    "VolumeCanvas.getParam: Cannot get parameter value."
+                raise ValueError("VolumeCanvas.getParam: Cannot get parameter"
+                                 " value.")
             else:
                 return value
@@ -424,24 +422,19 @@
         elif len(toks) == 2:
             try:
-                self.shapes.has_key(toks[0])
+                shapeinstance = self.shapes[toks[0]]
             except KeyError:
-                raise ValueError, \
-                    "VolumeCanvas.getParam: Could not find %s" % name
-
-            shapeinstance = self.shapes[toks[0]]
-
-            try:
-                shapeinstance.params.has_key(toks[1])
-            except KeyError:
-                raise ValueError, \
-                    "VolumeCanvas.getParam: Could not find %s" % name
+                raise ValueError("VolumeCanvas.getParam: Could not find "
+                                 "%s" % name)
+
+            if not toks[1] in shapeinstance.params:
+                raise ValueError("VolumeCanvas.getParam: Could not find "
+                                 "%s" % name)
 
             return shapeinstance.params[toks[1]]
 
         else:
-            raise ValueError, \
-                "VolumeCanvas.getParam: Could not find %s" % name
+            raise ValueError("VolumeCanvas.getParam: Could not find %s" % name)
 
-    def getParamList(self, shapeid = None):
+    def getParamList(self, shapeid=None):
         """
         return a full list of all available parameters from
@@ -455,26 +448,22 @@
 
         param_list = []
-        if shapeid == None:
-            for key1 in self.params.keys():
+        if shapeid is None:
+            for key1 in self.params:
                 #value1 = self.params[key1]
                 param_list.append(key1)
-            for key2 in self.shapes.keys():
+            for key2 in self.shapes:
                 value2 = self.shapes[key2]
                 header = key2 + '.'
-                for key3 in value2.params.keys():
+                for key3 in value2.params:
                     fullname = header + key3
                     param_list.append(fullname)
 
         else:
-            try:
-                self.shapes.has_key(shapeid)
-            except KeyError:
-                raise ValueError, \
-                    "VolumeCanvas: getParamList: Could not find %s" % shapeid
+            if not shapeid in self.shapes:
+                raise ValueError("VolumeCanvas: getParamList: Could not find "
+                                 "%s" % shapeid)
+
             header = shapeid + '.'
-            param_list = self.shapes[shapeid].params.keys()
-            for i in range(len(param_list)):
-                param_list[i] = header + param_list[i]
-
+            param_list = [header + param for param in self.shapes[shapeid].params]
         return param_list
 
@@ -490,5 +479,5 @@
         @param shapeDesc: shape description
         """
-        # Create the object model
+        # Create the object model
         shapeDesc.create()
 
@@ -605,5 +594,5 @@
         # type we recognize
         else:
-            raise ValueError, "run(q): bad type for q"
+            raise ValueError("run(q): bad type for q")
 
     def runXY(self, q = 0):
@@ -625,5 +614,5 @@
         # type we recognize
         else:
-            raise ValueError, "runXY(q): bad type for q"
+            raise ValueError("runXY(q): bad type for q")
 
     def _create_modelObject(self):
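The VolumeCanvas.py changes replace Python 2-only constructs (comma-style raise statements, dict.has_key(), looping over .keys()) with forms that work in both Python 2 and 3, and collapse the getParamList() loop into a comprehension. The following is a minimal standalone sketch of those idiom changes, not SasView code; the ShapeStore class and its parameter names are invented for illustration.

# Standalone sketch of the idiom changes applied in VolumeCanvas.py.
# ShapeStore and its contents are hypothetical, not part of the SasView API.
class ShapeStore:
    def __init__(self):
        self.shapes = {"sphere1": {"radius": 20.0}}

    def get_param(self, shape_id, name):
        # Removed spellings:  self.shapes.has_key(shape_id)  /  raise KeyError, "..."
        # Portable spellings kept by the changeset:
        if shape_id not in self.shapes:                           # 'in' instead of has_key()
            raise KeyError("could not find shape %s" % shape_id)  # call-style raise
        params = self.shapes[shape_id]
        if name not in params:
            raise KeyError("could not find parameter %s" % name)
        return params[name]

    def param_list(self):
        # Iterate the dicts directly and build the list in one comprehension,
        # mirroring the getParamList() cleanup.
        return [shape_id + "." + key
                for shape_id, params in self.shapes.items()
                for key in params]

store = ShapeStore()
print(store.get_param("sphere1", "radius"))  # 20.0
print(store.param_list())                    # ['sphere1.radius']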
src/sas/sascalc/data_util/qsmearing.py
--- src/sas/sascalc/data_util/qsmearing.py (revision 9a5097c)
+++ src/sas/sascalc/data_util/qsmearing.py (revision d01b55c)
@@ -67,9 +67,12 @@
     if _found_sesans == True:
         #Pre-compute the Hankel matrix (H)
-        qmax, qunits = data.sample.zacceptance
         SElength = Converter(data._xunit)(data.x, "A")
-        zaccept = Converter(qunits)(qmax, "1/A"),
+
+        theta_max = Converter("radians")(data.sample.zacceptance)[0]
+        q_max = 2 * np.pi / np.max(data.source.wavelength) * np.sin(theta_max)
+        zaccept = Converter("1/A")(q_max, "1/" + data.source.wavelength_unit),
+
         Rmax = 10000000
-        hankel = SesansTransform(data.x, SElength, zaccept, Rmax)
+        hankel = SesansTransform(data.x, SElength, data.source.wavelength, zaccept, Rmax)
         # Then return the actual transform, as if it were a smearing function
         return PySmear(hankel, model, offset=0)
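Instead of reading a q value directly from data.sample.zacceptance, the patched qsmearing.py now treats zacceptance as an angular acceptance theta_max and derives the q cutoff from the longest wavelength, q_max = (2*pi/lambda_max)*sin(theta_max). A rough numeric sketch of that conversion, using plain NumPy floats instead of SasView's unit Converter; the theta_max and wavelength values below are made up for illustration.

# Numeric sketch of the zacceptance -> q_max conversion shown in the diff above.
# Example values are assumptions, not data from the changeset.
import numpy as np

theta_max = 0.1                            # angular acceptance in radians (assumed)
wavelength = np.array([2.0, 5.0, 8.0])     # neutron wavelengths in Angstrom (assumed)

# q_max = 2*pi/lambda_max * sin(theta_max), as computed in the patched code
q_max = 2 * np.pi / np.max(wavelength) * np.sin(theta_max)
print(q_max)   # ~0.0784 1/Angstrom for these made-up numbers

Tying q_max to the measured wavelengths keeps the acceptance cutoff consistent with the wavelength array that the new SesansTransform call also receives.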
src/sas/sascalc/dataloader/readers/sesans_reader.py
--- src/sas/sascalc/dataloader/readers/sesans_reader.py (revision 9a5097c)
+++ src/sas/sascalc/dataloader/readers/sesans_reader.py (revision 857cc58)
@@ -1,9 +1,10 @@
 """
     SESANS reader (based on ASCII reader)
 
     Reader for .ses or .sesans file format
 
     Jurrian Bakker
 """
+import logging
 import numpy as np
 import os
@@ -18,31 +19,29 @@
 _ZERO = 1e-16
 
+
 class Reader:
     """
     Class to load sesans files (6 columns).
     """
-    ## File type
+    # File type
     type_name = "SESANS"
-
-    ## Wildcards
+
+    # Wildcards
     type = ["SESANS files (*.ses)|*.ses",
             "SESANS files (*..sesans)|*.sesans"]
-    ## List of allowed extensions
+    # List of allowed extensions
    ext = ['.ses', '.SES', '.sesans', '.SESANS']
-
-    ## Flag to bypass extension check
+
+    # Flag to bypass extension check
     allow_all = True
 
     def read(self, path):
-
-        # print "reader triggered"
-
         """
         Load data file
 
         :param path: file path
 
         :return: SESANSData1D object, or None
 
         :raise RuntimeError: when the file can't be opened
         :raise ValueError: when the length of the data vectors are inconsistent
@@ -51,124 +50,94 @@
         basename = os.path.basename(path)
         _, extension = os.path.splitext(basename)
-        if self.allow_all or extension.lower() in self.ext:
-            try:
-                # Read in binary mode since GRASP frequently has no-ascii
-                # characters that brakes the open operation
-                input_f = open(path,'rb')
-            except:
-                raise RuntimeError, "sesans_reader: cannot open %s" % path
-            buff = input_f.read()
-            lines = buff.splitlines()
-            x = np.zeros(0)
-            y = np.zeros(0)
-            dy = np.zeros(0)
-            lam = np.zeros(0)
-            dlam = np.zeros(0)
-            dx = np.zeros(0)
-
-            #temp. space to sort data
-            tx = np.zeros(0)
-            ty = np.zeros(0)
-            tdy = np.zeros(0)
-            tlam = np.zeros(0)
-            tdlam = np.zeros(0)
-            tdx = np.zeros(0)
-            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
-            self.filename = output.filename = basename
+        if not (self.allow_all or extension.lower() in self.ext):
+            raise RuntimeError("{} has an unrecognized file extension".format(path))
+        else:
+            raise RunetimeError("{} is not a file".format(path))
+        with open(path, 'r') as input_f:
+            # Read in binary mode since GRASP frequently has no-ascii
+            # characters that brakes the open operation
+            line = input_f.readline()
+            params = {}
+            while not line.startswith("BEGIN_DATA"):
+                terms = line.split()
+                if len(terms) >= 2:
+                    params[terms[0]] = " ".join(terms[1:])
+                line = input_f.readline()
+            self.params = params
+            headers = input_f.readline().split()
 
-            paramnames=[]
-            paramvals=[]
-            zvals=[]
-            dzvals=[]
-            lamvals=[]
-            dlamvals=[]
-            Pvals=[]
-            dPvals=[]
+            data = np.loadtxt(input_f)
+            if data.size < 1:
+                raise RuntimeError("{} is empty".format(path))
+            x = data[:, headers.index("SpinEchoLength")]
+            if "SpinEchoLength_error" in headers:
+                dx = data[:, headers.index("SpinEchoLength_error")]
+            else:
+                dx = x*0.05
+            lam = data[:, headers.index("Wavelength")]
+            if "Wavelength_error" in headers:
+                dlam = data[:, headers.index("Wavelength_error")]
+            else:
+                dlam = lam*0.05
+            y = data[:, headers.index("Depolarisation")]
+            dy = data[:, headers.index("Depolarisation_error")]
 
-            for line in lines:
-                # Initial try for CSV (split on ,)
-                line=line.strip()
-                toks = line.split('\t')
-                if len(toks)==2:
-                    paramnames.append(toks[0])
-                    paramvals.append(toks[1])
-                if len(toks)>5:
-                    zvals.append(toks[0])
-                    dzvals.append(toks[3])
-                    lamvals.append(toks[4])
-                    dlamvals.append(toks[5])
-                    Pvals.append(toks[1])
-                    dPvals.append(toks[2])
-                else:
-                    continue
+            lam_unit = self._unit_fetch("Wavelength")
+            x, x_unit = self._unit_conversion(x, "A", self._unit_fetch("SpinEchoLength"))
+            dx, dx_unit = self._unit_conversion(
+                dx, lam_unit,
+                self._unit_fetch("SpinEchoLength"))
+            dlam, dlam_unit = self._unit_conversion(
+                dlam, lam_unit,
+                self._unit_fetch("Wavelength"))
+            y_unit = self._unit_fetch("Depolarisation")
 
-            x=[]
-            y=[]
-            lam=[]
-            dx=[]
-            dy=[]
-            dlam=[]
-            lam_header = lamvals[0].split()
-            data_conv_z = None
-            default_z_unit = "A"
-            data_conv_P = None
-            default_p_unit = " " # Adjust unit for axis (L^-3)
-            lam_unit = lam_header[1].replace("[","").replace("]","")
-            if lam_unit == 'AA':
-                lam_unit = 'A'
-            varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
-            valrange=range(1, len(zvals))
-            for i in valrange:
-                x.append(float(zvals[i]))
-                y.append(float(Pvals[i]))
-                lam.append(float(lamvals[i]))
-                dy.append(float(dPvals[i]))
-                dx.append(float(dzvals[i]))
-                dlam.append(float(dlamvals[i]))
+            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam,
+                            isSesans=True)
 
-            x,y,lam,dy,dx,dlam = [
-                np.asarray(v, 'double')
-                for v in (x,y,lam,dy,dx,dlam)
-            ]
+            output.y_unit = y_unit
+            output.x_unit = x_unit
+            output.source.wavelength_unit = lam_unit
+            output.source.wavelength = lam
+            self.filename = output.filename = basename
+            output.xaxis(r"\rm{z}", x_unit)
+            # Adjust label to ln P/(lam^2 t), remove lam column refs
+            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit)
+            # Store loading process information
+            output.meta_data['loader'] = self.type_name
+            output.sample.name = params["Sample"]
+            output.sample.ID = params["DataFileTitle"]
+            output.sample.thickness = self._unit_conversion(
+                float(params["Thickness"]), "cm",
+                self._unit_fetch("Thickness"))[0]
 
-            input_f.close()
+            output.sample.zacceptance = (
+                float(params["Theta_zmax"]),
+                self._unit_fetch("Theta_zmax"))
 
-            output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
-            output.y = y
-            output.y_unit = r'\AA^{-2} cm^{-1}'  # output y_unit added
-            output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
-            output.dy = dy
-            output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
-            output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
-
-            output.xaxis(r"\rm{z}", output.x_unit)
-            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit)  # Adjust label to ln P/(lam^2 t), remove lam column refs
+            output.sample.yacceptance = (
+                float(params["Theta_ymax"]),
+                self._unit_fetch("Theta_ymax"))
+            return output
 
-            # Store loading process information
-            output.meta_data['loader'] = self.type_name
-            #output.sample.thickness = float(paramvals[6])
-            output.sample.name = paramvals[1]
-            output.sample.ID = paramvals[0]
-            zaccept_unit_split = paramnames[7].split("[")
-            zaccept_unit = zaccept_unit_split[1].replace("]","")
-            if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
-                zaccept_unit = "1/A"
-            output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-            output.vars = varheader
+    @staticmethod
+    def _unit_conversion(value, value_unit, default_unit):
+        """
+        Performs unit conversion on a measurement.
 
-            if len(output.x) < 1:
-                raise RuntimeError, "%s is empty" % path
-            return output
-
-        else:
-            raise RuntimeError, "%s is not a file" % path
-        return None
-
-    def _unit_conversion(self, value, value_unit, default_unit):
-        if has_converter == True and value_unit != default_unit:
-            data_conv_q = Converter(value_unit)
-            value = data_conv_q(value, units=default_unit)
+        :param value: The magnitude of the measurement
+        :param value_unit: a string containing the final desired unit
+        :param default_unit: a string containing the units of the original measurement
+        :return: The magnitude of the measurement in the new units
+        """
+        # (float, string, string) -> float
+        if has_converter and value_unit != default_unit:
+            data_conv_q = Converter(default_unit)
+            value = data_conv_q(value, units=value_unit)
             new_unit = default_unit
         else:
             new_unit = value_unit
         return value, new_unit
+
+    def _unit_fetch(self, unit):
+        return self.params[unit+"_unit"]
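The rewritten reader no longer splits every line on tabs; it expects a header of "key value" pairs terminated by a BEGIN_DATA marker, followed by a whitespace-separated column-name line and a numeric table read with np.loadtxt, with columns located by name (SpinEchoLength, Depolarisation, Wavelength and their _error companions). The sketch below parses that layout outside SasView; the file contents are invented for illustration and real .ses files carry more header keys (Thickness, Theta_zmax, unit entries, and so on) than shown here.

# Minimal sketch of the header-plus-table layout the rewritten reader expects.
# The ses_text contents are assumptions made for this example only.
import io
import numpy as np

ses_text = """FileFormatVersion 1.0
Sample demo sample
BEGIN_DATA
SpinEchoLength Depolarisation Depolarisation_error Wavelength
100.0 -0.01 0.001 2.0
200.0 -0.05 0.002 2.0
"""

f = io.StringIO(ses_text)
params = {}
line = f.readline()
while not line.startswith("BEGIN_DATA"):      # header: "key value..." pairs
    terms = line.split()
    if len(terms) >= 2:
        params[terms[0]] = " ".join(terms[1:])
    line = f.readline()
headers = f.readline().split()                # column names
data = np.loadtxt(f)                          # numeric table

x = data[:, headers.index("SpinEchoLength")]
y = data[:, headers.index("Depolarisation")]
dy = data[:, headers.index("Depolarisation_error")]
print(params["Sample"], x, y, dy)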