Changes in / [1686a333:7f71637] in sasview
Files:
- 17 added
- 7 edited
.gitignore
re04f87b → rdf332d8

(The first hunk is a whitespace-only change to the comment
"# Feel free to add more stuff to this as and when it becomes an issue.")

      /test/sasdataloader/test/plugins.zip
      /test/sasdataloader/test/test_log.txt
    + /test/sasdataloader/test/isis_1_0_write_test.xml
    + /test/sasdataloader/test/isis_1_1_write_test.xml
    + /test/sasdataloader/test/write_test.xml

      # autogenerated scripts
      /sasview/installer.iss
docs/sphinx-docs/source/user/tools.rst
r8f46df7 → reb8da5f

      Data Operations Utility <sasgui/perspectives/calculator/data_operator_help>
      Density/Volume Calculator <sasgui/perspectives/calculator/density_calculator_help>
      Generic SANS Calculator <sasgui/perspectives/calculator/sas_calculator_help>
      Image Viewer <sasgui/perspectives/calculator/image_viewer_help>
      Kiessig Thickness Calculator <sasgui/perspectives/calculator/kiessig_calculator_help>
      SLD Calculator <sasgui/perspectives/calculator/sld_calculator_help>
      Slit Size Calculator <sasgui/perspectives/calculator/slit_calculator_help>
      Q Resolution Estimator <sasgui/perspectives/calculator/resolution_calculator_help>
      Python Shell <sasgui/perspectives/calculator/python_shell_help>
    + File Converter <sasgui/perspectives/file_converter/file_converter_help>
run.py
r832fea2 → r18e7309

(The first hunk, around the SASVIEW_DOC_PATH setup, contains whitespace-only
changes to the "# Notify the help menu that the Sphinx documentation ..." comment.)

      # Compiled modules need to be pulled from the build directory.
      # Some packages are not where they are needed, so load them explicitly.
    + import sas.sascalc.file_converter
    + sas.sascalc.file_converter.core = import_package('sas.sascalc.file_converter.core',
    +     joinpath(build_path, 'sas', 'sascalc', 'file_converter', 'core'))
    +
    + # Compiled modules need to be pulled from the build directory.
    + # Some packages are not where they are needed, so load them explicitly.
      import sas.sascalc.calculator
      sas.sascalc.calculator.core = import_package('sas.sascalc.calculator.core',
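For context, import_package loads a package from an explicit directory in the build tree so that the compiled file_converter.core extension can be found before installation. A minimal sketch of such a helper, based on the Python 2-era imp module, is shown below; it illustrates the idea and is not necessarily the exact implementation in run.py.

    # Rough sketch of a path-based package import, assuming an imp-style helper
    # comparable to run.py's import_package; the real helper may differ.
    import imp
    import os
    import sys

    def import_package(modname, path):
        """Import the package living at `path` and register it as `modname`."""
        # Load <path>/__init__.py as the package module.
        mod = imp.load_source(modname, os.path.join(path, '__init__.py'))
        # Give it a __path__ so submodules (e.g. compiled extensions living in
        # the same build directory) can be imported as modname.submodule.
        mod.__path__ = [os.path.abspath(path)]
        sys.modules[modname] = mod
        return mod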
sasview/sasview.py
re68c9bf → re68c9bf

              logging.error(traceback.format_exc())

    +         # File converter tool
    +         try:
    +             import sas.sasgui.perspectives.file_converter as module
    +             converter_plug = module.Plugin()
    +             self.gui.add_perspective(converter_plug)
    +         except:
    +             logging.error("%s: could not find File Converter plug-in module"% \
    +                 APP_NAME)
    +             logging.error(traceback.format_exc())

          # Add welcome page
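The block above registers the File Converter as an optional perspective: if the module is missing, the failure is logged and start-up continues. A generic, self-contained illustration of that defensive registration pattern, with stub objects standing in for the SasView GUI and a hypothetical module name, might look like this:

    # Generic illustration only; StubGui and optional_converter_module are
    # stand-ins, not the sasgui API.
    import logging
    import traceback

    APP_NAME = "ExampleApp"

    class StubGui(object):
        def __init__(self):
            self.perspectives = []
        def add_perspective(self, plug):
            self.perspectives.append(plug)

    gui = StubGui()
    try:
        # In SasView this would be the real perspective package; here the
        # import is expected to fail to show the fallback path.
        import optional_converter_module as module   # hypothetical module name
        gui.add_perspective(module.Plugin())
    except Exception:
        # A missing optional tool is logged, but the application keeps starting.
        logging.error("%s: could not find File Converter plug-in module", APP_NAME)
        logging.error(traceback.format_exc())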
setup.py
rdb74ee8 → r18e7309

      from distutils.command.build_ext import build_ext
      from distutils.core import Command
    + import numpy

      # Manage version number ######################################
  …
      # sas.sascalc.pr
      srcdir = os.path.join("src", "sas", "sascalc", "pr", "c_extensions")
  …
                         include_dirs=[],
                         ) )

    + # sas.sascalc.file_converter
    + mydir = os.path.join("src", "sas", "sascalc", "file_converter", "c_ext")
    + package_dir["sas.sascalc.file_converter.core"] = mydir
    + package_dir["sas.sascalc.file_converter"] = os.path.join("src", "sas", "sascalc", "file_converter")
    + packages.extend(["sas.sascalc.file_converter", "sas.sascalc.file_converter.core"])
    + ext_modules.append( Extension("sas.sascalc.file_converter.core.bsl_loader",
    +                    sources = [os.path.join(mydir, "bsl_loader.c")],
    +                    include_dirs=[numpy.get_include()],
    +                    ) )

      # sas.sascalc.fit
      package_dir["sas.sascalc.fit"] = os.path.join("src", "sas", "sascalc", "fit")

The remaining hunks in setup.py (around the clean-build message, the 64-bit check,
the OpenMP build options, the data_util section, the 'test/upcoming_formats/*'
entry, "# Set up SasView" and the closing parenthesis of setup()) contain
whitespace-only changes.
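The new bsl_loader extension compiles C code that uses the NumPy C API, which is why setup.py now imports numpy and passes numpy.get_include() as an include directory. A stripped-down, standalone sketch of the same pattern follows; the package name and the my_loader.c source are placeholders, not part of the SasView tree.

    # Minimal sketch of building a NumPy-dependent C extension with distutils.
    import numpy
    from distutils.core import setup, Extension

    setup(
        name="example_ext",
        ext_modules=[
            Extension(
                "example_ext.core.my_loader",        # import path of the built module
                sources=["src/my_loader.c"],         # hypothetical C source file
                include_dirs=[numpy.get_include()],  # lets the C code include numpy/arrayobject.h
            )
        ],
    )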
src/sas/sascalc/dataloader/readers/cansas_reader.py
r1686a333 → r1686a333

              written = written | self.write_node(pix, "z", item.pixel_size.z,
                                                  {"unit": item.pixel_size_unit})
    -         written = written | self.write_node(det, "slit_length",
    -                                             item.slit_length,
    -                                             {"unit": item.slit_length_unit})
              if written == True:
                  self.append(pix, det)
    +         self.write_node(det, "slit_length", item.slit_length,
    +                         {"unit": item.slit_length_unit})

          def _write_process_notes(self, datainfo, entry_node):
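The effect of this reordering, as far as the shown lines go, is that slit_length no longer feeds the `written` flag, so a slit length on its own can no longer cause an otherwise empty pixel-size node to be appended; the slit length is simply written to the detector node afterwards. A toy illustration of that flag logic, using plain lists instead of the real XML nodes (all names here are illustrative, not the SasView API):

    # Toy stand-in for write_node: returns True only when there is a value to write.
    def write_node(parent, name, value, attrs=None):
        if value is None:
            return False
        parent.append((name, value, attrs or {}))
        return True

    pix, det = [], []
    item_pixel_size_z = None      # no pixel-size information in this example
    item_slit_length = 0.1        # but a slit length is present

    written = False
    written = written | write_node(pix, "z", item_pixel_size_z, {"unit": "mm"})
    # Previously slit_length was OR-ed into `written` too, so a bare slit length
    # could make the empty pixel-size node get appended.
    if written:
        det.append(("pixel_size", pix))
    # After the change, slit_length is written to the detector node regardless.
    write_node(det, "slit_length", item_slit_length, {"unit": "mm"})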
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rd72567e → r3d6ab79

          :return: List of Data1D/2D objects and/or a list of errors.
          """
    -
          ## Reinitialize the class when loading a new data file to reset all class variables
          self.reset_class_variables()
  …
          ## If this is a dataset, store the data appropriately
          data_set = data[key][:]
    +     unit = self._get_unit(value)
    +
    +     ## I and Q Data
    +     if key == u'I':
    +         if type(self.current_dataset) is plottable_2D:
    +             self.current_dataset.data = data_set.flatten()
    +             self.current_dataset.zaxis("Intensity", unit)
    +         else:
    +             self.current_dataset.y = data_set.flatten()
    +             self.current_dataset.yaxis("Intensity", unit)
    +         continue
    +     elif key == u'Idev':
    +         if type(self.current_dataset) is plottable_2D:
    +             self.current_dataset.err_data = data_set.flatten()
    +         else:
    +             self.current_dataset.dy = data_set.flatten()
    +         continue
    +     elif key == u'Q':
    +         self.current_dataset.xaxis("Q", unit)
    +         if type(self.current_dataset) is plottable_2D:
    +             self.current_dataset.q = data_set.flatten()
    +         else:
    +             self.current_dataset.x = data_set.flatten()
    +         continue
    +     elif key == u'Qy':
    +         self.current_dataset.yaxis("Q_y", unit)
    +         self.current_dataset.qy_data = data_set.flatten()
    +         continue
    +     elif key == u'Qydev':
    +         self.current_dataset.dqy_data = data_set.flatten()
    +         continue
    +     elif key == u'Qx':
    +         self.current_dataset.xaxis("Q_x", unit)
    +         self.current_dataset.qx_data = data_set.flatten()
    +         continue
    +     elif key == u'Qxdev':
    +         self.current_dataset.dqx_data = data_set.flatten()
    +         continue
    +     elif key == u'Mask':
    +         self.current_dataset.mask = data_set.flatten()
    +         continue

          for data_point in data_set:
              ## Top Level Meta Data
    -         unit = self._get_unit(value)
              if key == u'definition':
                  self.current_datainfo.meta_data['reader'] = data_point
  …
              self.current_datainfo.notes.append(data_point)

The old handling of the I, Idev, Q, Qy, Qydev, Qx, Qxdev and Mask keys inside the
"for data_point in data_set:" loop, which grew each array one element at a time
with np.append, is removed; those keys are now handled by the whole-dataset block
shown above.

          ## Sample Information
    -     elif key == u'Title' and self.parent_class == u'SASsample':
    +     elif key == u'Title' and self.parent_class == u'SASsample':  # CanSAS 2.0 format
    +         self.current_datainfo.sample.name = data_point
    +     elif key == u'name' and self.parent_class == u'SASsample':  # NXcanSAS format
              self.current_datainfo.sample.name = data_point
          elif key == u'thickness' and self.parent_class == u'SASsample':
  …
          elif key == u'name' and self.parent_class == u'SASprocess':
              self.process.name = data_point
    -     elif key == u'Title' and self.parent_class == u'SASprocess':
    +     elif key == u'Title' and self.parent_class == u'SASprocess':  # CanSAS 2.0 format
    +         self.process.name = data_point
    +     elif key == u'name' and self.parent_class == u'SASprocess':  # NXcanSAS format
              self.process.name = data_point
          elif key == u'description' and self.parent_class == u'SASprocess':
  …
          ## Type cast data arrays to float64 and find min/max as appropriate
          for dataset in self.data2d:
    -         dataset.data = np.delete(dataset.data, [0])
              dataset.data = dataset.data.astype(np.float64)
    -         dataset.err_data = np.delete(dataset.err_data, [0])
              dataset.err_data = dataset.err_data.astype(np.float64)
    -         dataset.mask = np.delete(dataset.mask, [0])
              if dataset.qx_data is not None:
    -             dataset.qx_data = np.delete(dataset.qx_data, [0])
                  dataset.xmin = np.min(dataset.qx_data)
                  dataset.xmax = np.max(dataset.qx_data)
                  dataset.qx_data = dataset.qx_data.astype(np.float64)
  …
The analogous np.delete(..., [0]) lines are likewise removed for dqx_data, qy_data,
dqy_data and q_data in the 2-D clean-up loop, and for x, y, dx, dxl, dxw and dy in
the 1-D clean-up loop; only the astype(np.float64) casts and the min/max
bookkeeping remain.
  …
          self.data2d = []
          self.current_datainfo = DataInfo()
    +

      def _initialize_new_data_set(self, parent_list = None):
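The main point of this rewrite is to read each HDF5 dataset in one block and flatten() it, rather than growing NumPy arrays point by point, because np.append copies the entire array on every call. A small self-contained sketch of the difference, using a throwaway h5py file (the file and dataset names here are illustrative only):

    import numpy as np
    import h5py

    # Create a tiny example file with a 2-D intensity dataset.
    with h5py.File("tiny_example.h5", "w") as f:
        f.create_dataset("I", data=np.arange(10000.0).reshape(100, 100))

    with h5py.File("tiny_example.h5", "r") as f:
        data_set = f["I"][:]                 # read the whole dataset at once

        # Old pattern: grow the array one value at a time; each np.append
        # copies the entire array, so this is quadratic in the number of points.
        y_slow = np.empty(0)
        for data_point in data_set.flatten():
            y_slow = np.append(y_slow, data_point)

        # New pattern: keep the block read and just flatten it.
        y_fast = data_set.flatten()

        assert np.array_equal(y_slow, y_fast)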