Changes in / [9efdb29:9e6aeaf] in sasview
- Location:
- src/sas
- Files:
-
- 2 added
- 2 deleted
- 12 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/data_info.py
r574adc7 rdeaa0c6 1176 1176 final_dataset.yaxis(data._yaxis, data._yunit) 1177 1177 final_dataset.zaxis(data._zaxis, data._zunit) 1178 final_dataset.x_bins = data.x_bins 1179 final_dataset.y_bins = data.y_bins 1178 if len(data.data.shape) == 2: 1179 n_rows, n_cols = data.data.shape 1180 final_dataset.y_bins = data.qy_data[0::int(n_cols)] 1181 final_dataset.x_bins = data.qx_data[:int(n_cols)] 1180 1182 else: 1181 1183 return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \ -
src/sas/sascalc/dataloader/file_reader_base_class.py
r7b50f14 rdeaa0c6 197 197 dataset.x_bins = dataset.qx_data[:int(n_cols)] 198 198 dataset.data = dataset.data.flatten() 199 if len(dataset.data) > 0: 200 dataset.xmin = np.min(dataset.qx_data) 201 dataset.xmax = np.max(dataset.qx_data) 202 dataset.ymin = np.min(dataset.qy_data) 203 dataset.ymax = np.max(dataset.qx_data) 199 204 200 205 def format_unit(self, unit=None): … … 221 226 self.output = [] 222 227 223 def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False, 224 has_error_dxl=False, has_error_dxw=False): 228 def data_cleanup(self): 229 """ 230 Clean up the data sets and refresh everything 231 :return: None 232 """ 233 self.remove_empty_q_values() 234 self.send_to_output() # Combine datasets with DataInfo 235 self.current_datainfo = DataInfo() # Reset DataInfo 236 237 def remove_empty_q_values(self): 225 238 """ 226 239 Remove any point where Q == 0 227 240 """ 228 x = self.current_dataset.x 229 self.current_dataset.x = self.current_dataset.x[x != 0] 230 self.current_dataset.y = self.current_dataset.y[x != 0] 231 if has_error_dy: 232 self.current_dataset.dy = self.current_dataset.dy[x != 0] 233 if has_error_dx: 234 self.current_dataset.dx = self.current_dataset.dx[x != 0] 235 if has_error_dxl: 236 self.current_dataset.dxl = self.current_dataset.dxl[x != 0] 237 if has_error_dxw: 238 self.current_dataset.dxw = self.current_dataset.dxw[x != 0] 241 if isinstance(self.current_dataset, plottable_1D): 242 # Booleans for resolutions 243 has_error_dx = self.current_dataset.dx is not None 244 has_error_dxl = self.current_dataset.dxl is not None 245 has_error_dxw = self.current_dataset.dxw is not None 246 has_error_dy = self.current_dataset.dy is not None 247 # Create arrays of zeros for non-existent resolutions 248 if has_error_dxw and not has_error_dxl: 249 array_size = self.current_dataset.dxw.size - 1 250 self.current_dataset.dxl = np.append(self.current_dataset.dxl, 251 np.zeros([array_size])) 252 has_error_dxl = True 253 elif has_error_dxl and not 
has_error_dxw: 254 array_size = self.current_dataset.dxl.size - 1 255 self.current_dataset.dxw = np.append(self.current_dataset.dxw, 256 np.zeros([array_size])) 257 has_error_dxw = True 258 elif not has_error_dxl and not has_error_dxw and not has_error_dx: 259 array_size = self.current_dataset.x.size - 1 260 self.current_dataset.dx = np.append(self.current_dataset.dx, 261 np.zeros([array_size])) 262 has_error_dx = True 263 if not has_error_dy: 264 array_size = self.current_dataset.y.size - 1 265 self.current_dataset.dy = np.append(self.current_dataset.dy, 266 np.zeros([array_size])) 267 has_error_dy = True 268 269 # Remove points where q = 0 270 x = self.current_dataset.x 271 self.current_dataset.x = self.current_dataset.x[x != 0] 272 self.current_dataset.y = self.current_dataset.y[x != 0] 273 if has_error_dy: 274 self.current_dataset.dy = self.current_dataset.dy[x != 0] 275 if has_error_dx: 276 self.current_dataset.dx = self.current_dataset.dx[x != 0] 277 if has_error_dxl: 278 self.current_dataset.dxl = self.current_dataset.dxl[x != 0] 279 if has_error_dxw: 280 self.current_dataset.dxw = self.current_dataset.dxw[x != 0] 281 elif isinstance(self.current_dataset, plottable_2D): 282 has_error_dqx = self.current_dataset.dqx_data is not None 283 has_error_dqy = self.current_dataset.dqy_data is not None 284 has_error_dy = self.current_dataset.err_data is not None 285 has_mask = self.current_dataset.mask is not None 286 x = self.current_dataset.qx_data 287 self.current_dataset.data = self.current_dataset.data[x != 0] 288 self.current_dataset.qx_data = self.current_dataset.qx_data[x != 0] 289 self.current_dataset.qy_data = self.current_dataset.qy_data[x != 0] 290 self.current_dataset.q_data = np.sqrt( 291 np.square(self.current_dataset.qx_data) + np.square( 292 self.current_dataset.qy_data)) 293 if has_error_dy: 294 self.current_dataset.err_data = self.current_dataset.err_data[x != 0] 295 if has_error_dqx: 296 self.current_dataset.dqx_data = 
self.current_dataset.dqx_data[x != 0] 297 if has_error_dqy: 298 self.current_dataset.dqy_data = self.current_dataset.dqy_data[x != 0] 299 if has_mask: 300 self.current_dataset.mask = self.current_dataset.mask[x != 0] 239 301 240 302 def reset_data_list(self, no_lines=0): -
src/sas/sascalc/dataloader/readers/abs_reader.py
r46cf4c9 rffb6474 104 104 # Sample thickness in mm 105 105 try: 106 value = float(line_toks[5]) 106 # ABS writer adds 'C' with no space to the end of the 107 # thickness column. Remove it if it is there before 108 # converting the thickness. 109 if line_toks[5][:-1] not in '012345679.': 110 value = float(line_toks[5][:-1]) 111 else: 112 value = float(line_toks[5]) 107 113 if self.current_datainfo.sample.thickness_unit != 'cm': 108 114 conv = Converter('cm') … … 196 202 is_data_started = True 197 203 198 self.remove_empty_q_values( True, True)204 self.remove_empty_q_values() 199 205 200 206 # Sanity check -
src/sas/sascalc/dataloader/readers/ascii_reader.py
rf7d720f r7b07fbe 156 156 raise FileContentsException(msg) 157 157 158 self.remove_empty_q_values( has_error_dx, has_error_dy)158 self.remove_empty_q_values() 159 159 self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}') 160 160 self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}") -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r9efdb29 r62160509 100 100 xml_file = self.f_open.name 101 101 # We don't sure f_open since lxml handles opnening/closing files 102 if not self.f_open.closed:103 self.f_open.close()104 105 basename, _ = os.path.splitext(os.path.basename(xml_file))106 107 102 try: 108 103 # Raises FileContentsException 109 104 self.load_file_and_schema(xml_file, schema_path) 110 self.current_datainfo = DataInfo() 111 # Raises FileContentsException if file doesn't meet CanSAS schema 105 # Parse each SASentry 106 entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry', 107 namespaces={ 108 'ns': self.cansas_defaults.get( 109 "ns") 110 }) 112 111 self.is_cansas(self.extension) 113 self.invalid = False # If we reach this point then file must be valid CanSAS114 115 # Parse each SASentry116 entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry', namespaces={117 'ns': self.cansas_defaults.get("ns")118 })119 # Look for a SASentry120 self.names.append("SASentry")121 112 self.set_processing_instructions() 122 123 113 for entry in entry_list: 124 self.current_datainfo.filename = basename + self.extension125 self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D"126 self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions127 114 self._parse_entry(entry) 128 115 self.data_cleanup() … … 146 133 invalid_xml = self.find_invalid_xml() 147 134 if invalid_xml != "": 135 basename, _ = os.path.splitext( 136 os.path.basename(self.f_open.name)) 148 137 invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml 149 138 raise DataReaderException(invalid_xml) # Handled by base class … … 160 149 except Exception as e: # Convert all other exceptions to FileContentsExceptions 161 150 raise FileContentsException(str(e)) 162 151 finally: 152 if not self.f_open.closed: 153 self.f_open.close() 163 154 164 155 def load_file_and_schema(self, xml_file, schema_path=""): … … 205 196 if not self._is_call_local() and not recurse: 206 197 self.reset_state() 198 if not recurse: 199 
self.current_datainfo = DataInfo() 200 # Raises FileContentsException if file doesn't meet CanSAS schema 201 self.invalid = False 202 # Look for a SASentry 207 203 self.data = [] 208 self. current_datainfo = DataInfo()204 self.parent_class = "SASentry" 209 205 self.names.append("SASentry") 210 self.parent_class = "SASentry" 206 self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D" 207 self.current_datainfo.meta_data[ 208 PREPROCESS] = self.processing_instructions 209 if self._is_call_local() and not recurse: 210 basename, _ = os.path.splitext(os.path.basename(self.f_open.name)) 211 self.current_datainfo.filename = basename + self.extension 211 212 # Create an empty dataset if no data has been passed to the reader 212 213 if self.current_dataset is None: 213 self.current_dataset = plottable_1D(np.empty(0), np.empty(0), 214 np.empty(0), np.empty(0)) 214 self._initialize_new_data_set(dom) 215 215 self.base_ns = "{" + CANSAS_NS.get(self.cansas_version).get("ns") + "}" 216 216 … … 224 224 tagname_original = tagname 225 225 # Skip this iteration when loading in save state information 226 if tagname == "fitting_plug_in" or tagname == "pr_inversion" or tagname == "invariant":226 if tagname in ["fitting_plug_in", "pr_inversion", "invariant", "corfunc"]: 227 227 continue 228 228 # Get where to store content … … 254 254 self._add_intermediate() 255 255 else: 256 # TODO: Clean this up to make it faster (fewer if/elifs) 256 257 if isinstance(self.current_dataset, plottable_2D): 257 258 data_point = node.text … … 498 499 self.sort_two_d_data() 499 500 self.reset_data_list() 500 empty = None 501 return self.output[0], empty 502 503 def data_cleanup(self): 504 """ 505 Clean up the data sets and refresh everything 506 :return: None 507 """ 508 has_error_dx = self.current_dataset.dx is not None 509 has_error_dxl = self.current_dataset.dxl is not None 510 has_error_dxw = self.current_dataset.dxw is not None 511 has_error_dy = self.current_dataset.dy is not None 512 
self.remove_empty_q_values(has_error_dx=has_error_dx, 513 has_error_dxl=has_error_dxl, 514 has_error_dxw=has_error_dxw, 515 has_error_dy=has_error_dy) 516 self.send_to_output() # Combine datasets with DataInfo 517 self.current_datainfo = DataInfo() # Reset DataInfo 501 return self.output[0], None 518 502 519 503 def _is_call_local(self): … … 549 533 self.aperture = Aperture() 550 534 elif self.parent_class == 'SASdata': 551 self._check_for_empty_resolution()552 535 self.data.append(self.current_dataset) 553 536 … … 605 588 if 'unit' in attr and attr.get('unit') is not None: 606 589 try: 607 local_unit = attr['unit'] 590 unit = attr['unit'] 591 unit_list = unit.split("|") 592 if len(unit_list) > 1: 593 self.current_dataset.xaxis(unit_list[0].strip(), 594 unit_list[1].strip()) 595 local_unit = unit_list[1] 596 else: 597 local_unit = unit 608 598 unitname = self.ns_list.current_level.get("unit", "") 609 599 if "SASdetector" in self.names: … … 659 649 return node_value, value_unit 660 650 661 def _check_for_empty_resolution(self):662 """663 a method to check all resolution data sets are the same size as I and q664 """665 dql_exists = False666 dqw_exists = False667 dq_exists = False668 di_exists = False669 if self.current_dataset.dxl is not None:670 dql_exists = True671 if self.current_dataset.dxw is not None:672 dqw_exists = True673 if self.current_dataset.dx is not None:674 dq_exists = True675 if self.current_dataset.dy is not None:676 di_exists = True677 if dqw_exists and not dql_exists:678 array_size = self.current_dataset.dxw.size679 self.current_dataset.dxl = np.zeros(array_size)680 elif dql_exists and not dqw_exists:681 array_size = self.current_dataset.dxl.size682 self.current_dataset.dxw = np.zeros(array_size)683 elif not dql_exists and not dqw_exists and not dq_exists:684 array_size = self.current_dataset.x.size685 self.current_dataset.dx = np.append(self.current_dataset.dx,686 np.zeros([array_size]))687 if not di_exists:688 array_size = 
self.current_dataset.y.size689 self.current_dataset.dy = np.append(self.current_dataset.dy,690 np.zeros([array_size]))691 692 651 def _initialize_new_data_set(self, node=None): 693 652 if node is not None: -
src/sas/sascalc/fit/pagestate.py
r574adc7 r574adc7 1226 1226 namespaces=CANSAS_NS) 1227 1227 for entry in entry_list: 1228 try:1229 sas_entry, _ = self._parse_save_state_entry(entry)1230 except:1231 raise1232 1228 fitstate = self._parse_state(entry) 1233 1234 1229 # state could be None when .svs file is loaded 1235 1230 # in this case, skip appending to output 1236 1231 if fitstate is not None: 1232 try: 1233 sas_entry, _ = self._parse_save_state_entry( 1234 entry) 1235 except: 1236 raise 1237 1237 sas_entry.meta_data['fitstate'] = fitstate 1238 1238 sas_entry.filename = fitstate.file -
src/sas/sasgui/perspectives/corfunc/corfunc_state.py
r2a399ca r1fa4f736 289 289 namespaces={'ns': CANSAS_NS}) 290 290 for entry in entry_list: 291 sas_entry, _ = self._parse_entry(entry)292 291 corstate = self._parse_state(entry) 293 292 294 293 if corstate is not None: 294 sas_entry, _ = self._parse_entry(entry) 295 295 sas_entry.meta_data['corstate'] = corstate 296 296 sas_entry.filename = corstate.file -
src/sas/sasgui/perspectives/fitting/fitpage.py
r9706d88 r48154abb 2049 2049 # Save state_fit 2050 2050 self.save_current_state_fit() 2051 self.onSmear(None) 2052 self._onDraw(None) 2051 2053 except: 2052 2054 self._show_combox_helper() -
src/sas/sasgui/perspectives/fitting/simfitpage.py
r00f7ff1 r00f7ff1 163 163 :return: None 164 164 """ 165 model_map = {} 165 init_map = {} 166 final_map = {} 166 167 # Process each model and associate old M# with new M# 167 168 i = 0 … … 175 176 model_map[saved_model.pop('fit_page_source')] = \ 176 177 model[3].name 178 check = bool(saved_model.pop('checked')) 179 self.model_list[i][0].SetValue(check) 180 inter_id = str(i)*5 181 init_map[saved_model.pop('fit_page_source')] = inter_id 182 final_map[inter_id] = model[3].name 177 183 check = bool(saved_model.pop('checked')) 178 184 self.model_list[i][0].SetValue(check) … … 1099 1105 cbox.Append(name, value) 1100 1106 cbox.SetStringSelection(selected) 1107 <<<<<<< HEAD 1108 ======= 1109 1110 1111 class SimFitPageState: 1112 """ 1113 State of the simultaneous fit page for saving purposes 1114 """ 1115 1116 def __init__(self): 1117 # Sim Fit Page Number 1118 self.fit_page_no = None 1119 # Select all data 1120 self.select_all = False 1121 # Data sets sent to fit page 1122 self.model_list = [] 1123 # Data sets to be fit 1124 self.model_to_fit = [] 1125 # Number of constraints 1126 self.no_constraint = 0 1127 # Dictionary of constraints 1128 self.constraint_dict = {} 1129 # List of constraints 1130 self.constraints_list = [] 1131 1132 def load_from_save_state(self, fit): 1133 """ 1134 Load in a simultaneous/constrained fit from a save state 1135 :param fit: Fitpanel object 1136 :return: None 1137 """ 1138 1139 init_map = {} 1140 final_map = {} 1141 if fit.fit_panel.sim_page is None: 1142 fit.fit_panel.add_sim_page() 1143 sim_page = fit.fit_panel.sim_page 1144 1145 # Process each model and associate old M# with new M# 1146 i = 0 1147 for model in sim_page.model_list: 1148 model_id = self._format_id(model[1].keys()[0]) 1149 for saved_model in self.model_list: 1150 save_id = saved_model.pop('name') 1151 saved_model['name'] = save_id 1152 save_id = self._format_id(save_id) 1153 if save_id == model_id: 1154 inter_id = str(i) + str(i) + str(i) + str(i) + str(i) 1155 
init_map[saved_model.pop('fit_page_source')] = inter_id 1156 final_map[inter_id] = model[3].name 1157 check = bool(saved_model.pop('checked')) 1158 sim_page.model_list[i][0].SetValue(check) 1159 break 1160 i += 1 1161 sim_page.check_model_name(None) 1162 1163 if len(self.constraints_list) > 0: 1164 sim_page.hide_constraint.SetValue(False) 1165 sim_page.show_constraint.SetValue(True) 1166 sim_page._display_constraint(None) 1167 1168 for index, item in enumerate(self.constraints_list): 1169 model_cbox = item.pop('model_cbox') 1170 if model_cbox != "": 1171 constraint_value = item.pop('constraint') 1172 param = item.pop('param_cbox') 1173 equality = item.pop('egal_txt') 1174 for key, value in init_map.items(): 1175 model_cbox = model_cbox.replace(key, value) 1176 constraint_value = constraint_value.replace(key, value) 1177 for key, value in final_map.items(): 1178 model_cbox = model_cbox.replace(key, value) 1179 constraint_value = constraint_value.replace(key, value) 1180 1181 sim_page.constraints_list[index][0].SetValue(model_cbox) 1182 sim_page._on_select_model(None) 1183 sim_page.constraints_list[index][1].SetValue(param) 1184 sim_page.constraints_list[index][2].SetLabel(equality) 1185 sim_page.constraints_list[index][3].SetValue(constraint_value) 1186 sim_page._on_add_constraint(None) 1187 sim_page._manager.sim_page = sim_page 1188 1189 def _format_id(self, original_id): 1190 original_id = original_id.rstrip('1234567890.') 1191 new_id_list = original_id.split() 1192 new_id = ' '.join(new_id_list) 1193 return new_id 1194 >>>>>>> ticket-887-reorg -
src/sas/sasgui/perspectives/invariant/invariant_state.py
r7432acb r1fa4f736 728 728 729 729 for entry in entry_list: 730 731 sas_entry, _ = self._parse_entry(entry)732 730 invstate = self._parse_state(entry) 733 734 731 # invstate could be None when .svs file is loaded 735 732 # in this case, skip appending to output 736 733 if invstate is not None: 734 sas_entry, _ = self._parse_entry(entry) 737 735 sas_entry.meta_data['invstate'] = invstate 738 736 sas_entry.filename = invstate.file -
src/sas/sasgui/perspectives/pr/inversion_state.py
ra0e6b1b r1fa4f736 472 472 473 473 for entry in entry_list: 474 sas_entry, _ = self._parse_entry(entry)475 474 prstate = self._parse_prstate(entry) 476 475 #prstate could be None when .svs file is loaded 477 476 #in this case, skip appending to output 478 477 if prstate is not None: 478 sas_entry, _ = self._parse_entry(entry) 479 479 sas_entry.meta_data['prstate'] = prstate 480 480 sas_entry.filename = prstate.file -
src/sas/sasview/sasview.py
rb963b20 rb277220 183 183 # we are running may not actually need it; also, putting as little on the 184 184 # path as we can 185 os.environ['MPLBACKEND'] = backend 185 if backend: 186 os.environ['MPLBACKEND'] = backend 186 187 187 188 # TODO: ... so much for not importing matplotlib unless we need it...
Note: See TracChangeset for help on using the changeset viewer.