Changeset e4f421c in sasview
- Timestamp: Mar 2, 2015 2:55:36 PM (10 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: e3f77d8b
- Parents: c43e875
- File: 1 edited
src/sas/dataloader/data_info.py
Changes from r5e326a6 to re4f421c, summarized by section:

plottable_sesans1D, plottable_1D, plottable_2D and Vector:
- All four classes now derive explicitly from `object` (new-style classes); a minimal sketch of the difference follows below.
- In `plottable_sesans1D.__init__`, the commented-out `dxl`/`dxw` handling and the leftover debug comment are removed.
- The debug `print` statements in `plottable_sesans1D.xaxis()` and `yaxis()` ("xaxis active", "yaxis active", the label and the unit) are removed; the methods now only set the axis label and unit.
- The remaining edits in this region are trailing-whitespace cleanup in the attribute declarations and `__init__` bodies of `plottable_1D` and `plottable_2D`.
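The recurring edit in this hunk, and throughout the rest of the file, is the old-style to new-style class conversion. A minimal, self-contained sketch of what that changes under Python 2; the names `Legacy` and `Modern` are placeholders, not part of data_info.py:

```python
# Minimal illustration of the old-style vs. new-style class change (Python 2).
# Legacy and Modern are placeholder names, not classes from data_info.py.

class Legacy:            # old-style: instances report <type 'instance'>
    pass

class Modern(object):    # new-style: works cleanly with properties, super(), etc.
    pass

if __name__ == "__main__":
    print(type(Legacy()))   # <type 'instance'> under Python 2
    print(type(Modern()))   # <class '__main__.Modern'>
```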
Detector, Aperture, Collimation, Source, Sample, Process and TransmissionSpectrum:
- Each class now derives from `object`, matching the plottable classes above; a usage sketch of how these metadata classes compose follows below.
- `Detector.__init__` gets a tightened docstring, and the `self.offset`, `self.beam_center` and `self.pixel_size = Vector()` assignments are re-spaced.
- The module-level radiation constants (`NEUTRON = 'neutron'`, `XRAY = 'x-ray'`, `MUON = 'muon'`, `ELECTRON = 'electron'`) and the `Sample.__init__` assignments (`self.position = Vector()`, `self.details = []`) have their assignment spacing normalized.
- The `__str__` builders (`_str = "Detector:\n"`, `_str = "Source:\n"`, `_str = "Sample:\n"`, `_str = "Process:\n"`, `_str = "Transmission Spectrum:\n"`) are re-spaced; their output is unchanged.
- Remaining edits in this region are trailing-whitespace and blank-line cleanup.
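A short usage sketch of how these metadata classes fit together, assuming the module is importable as sas.dataloader.data_info (taken from the file path above); the field values are made up for illustration:

```python
# Sketch of composing the detector metadata classes touched in this hunk.
from sas.dataloader.data_info import Detector, Vector

det = Detector()                       # offset/orientation/beam_center/pixel_size
det.name = "example detector"          # become Vector() instances in __init__
det.distance = 4000.0                  # interpreted in det.distance_unit ('mm')
det.beam_center = Vector(64.0, 64.0)
det.pixel_size = Vector(5.0, 5.0)

print(det)                             # Detector.__str__ formats each field with its unit
```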
DataInfo:
- `TransmissionSpectrum` holds the transmission spectrum (wavelength, transmission and transmission deviation lists); like `DataInfo` it now derives from `object`.
- The class-level defaults of `DataInfo` (`title = ''`, `run = None`, `filename = ''`, `notes = None`, `process = None`, `detector = None`, `sample = None`, `source = None`, `meta_data = None`, ...) and the per-instance containers created in `__init__` (`self.run = []`, `self.run_name = {}`, `self.notes = []`, `self.process = []`, `self.detector = []`, `self.sample = Sample()`, `self.source = Source()`, `self.meta_data = {}`, `self.errors = []`) keep their values; only the assignment spacing is normalized.
- `append_empty_process()`, `add_notes()` and the opening of `__str__` (`_str = "File: %s\n" % self.filename`) see whitespace-only changes. A usage sketch of these helpers follows below.
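A usage sketch of the DataInfo bookkeeping helpers named above, again assuming the module is importable as sas.dataloader.data_info; the title and note text are made up:

```python
# Sketch of the DataInfo helpers: add_notes() and append_empty_process().
from sas.dataloader.data_info import DataInfo

info = DataInfo()                    # mutable containers (run, notes, process, ...)
info.title = "test run"              # are created per instance in __init__
info.add_notes("loaded from example.xml")

info.append_empty_process()          # appends a blank Process() ...
info.process[-1].name = "background subtraction"   # ... to be filled in afterwards

print(info)                          # DataInfo.__str__ gives the 'Nice printout'
```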
DataInfo arithmetic and union operators:
- `_perform_operation()` and `_perform_union()` are not implemented for `DataInfo` itself and still return `NotImplemented`; each data class inherited from `DataInfo` is expected to provide them.
- The docstrings of `__add__`, `__radd__`, `__sub__`, `__rsub__`, `__mul__`, `__rmul__`, `__div__`, `__rdiv__`, `__or__` and `__ror__` lose the spurious blank lines between their `:param:`, `:return:` and `:raise ValueError:` fields; the method bodies are untouched.
- Each operator still builds a small `operation(a, b)` callable and hands it to `_perform_operation()` (or `_perform_union()` for `__or__`/`__ror__`); a sketch of that delegation pattern follows below.
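A self-contained sketch of the delegation pattern used by all the arithmetic dunders: each one only supplies a tiny `operation` and lets `_perform_operation` do the point-by-point work. The class and names here are illustrative, not from data_info.py:

```python
# Toy illustration of the operator-delegation pattern in DataInfo.
class Combinable(object):
    def __init__(self, values):
        self.values = list(values)

    def _perform_operation(self, other, operation):
        # data_info loops over points and wraps each in an Uncertainty object;
        # here we simply apply the supplied operation element by element.
        if isinstance(other, Combinable):
            other_values = other.values
        else:
            other_values = [other] * len(self.values)   # scalar operand
        return Combinable(operation(a, b) for a, b in zip(self.values, other_values))

    def __add__(self, other):
        def operation(a, b):
            return a + b
        return self._perform_operation(other, operation)

    def __radd__(self, other):
        def operation(a, b):
            return b + a
        return self._perform_operation(other, operation)

print((Combinable([1, 2]) + Combinable([10, 20])).values)   # [11, 22]
print((1 + Combinable([1, 2])).values)                      # [2, 3] via __radd__
```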
SESANSData1D:
- The commented-out debug prints in `__init__`, `__str__` and `clone_without_data()` are removed, along with the large commented-out `is_slit_smeared()` block.
- `clone_without_data()` no longer allocates the unused `lam`/`dlam` scratch arrays before building the `Data1D` clone, and the `clone.title = self.title`, `clone.run = self.run`, ... metadata copies are re-spaced. A construction sketch for this class follows below.
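A construction sketch for the SESANS container, assuming the module is importable as sas.dataloader.data_info; the spin-echo lengths, polarisation values and wavelengths are made up for illustration (default units on the class are nm for x and a.u. for y):

```python
# Sketch of building a SESANSData1D object after this cleanup.
import numpy
from sas.dataloader.data_info import SESANSData1D

z = numpy.linspace(10.0, 1000.0, 20)         # made-up spin-echo lengths
pol = numpy.exp(-z / 500.0)                  # made-up polarisation values
lam = 2.0 * numpy.ones(20)                   # made-up wavelength per point

ses = SESANSData1D(x=z, y=pol, lam=lam,
                   dy=0.01 * numpy.ones(20), dlam=0.05 * numpy.ones(20))
print(ses)                                   # __str__ now prints without debug output
```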
Data1D:
- `__str__`, `is_slit_smeared()` and `clone_without_data()` receive whitespace-only cleanup (assignment spacing in the zero-filled arrays and in the `clone.*` metadata copies).
- The cloning pattern itself is unchanged: metadata is deep-copied while the data arrays come back zero-filled, as sketched below.
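A usage sketch of that cloning pattern, assuming the module is importable as sas.dataloader.data_info; the example data are made up:

```python
# Sketch of clone_without_data(): metadata is carried over, data arrays are zeroed.
import numpy
from sas.dataloader.data_info import Data1D

src = Data1D(numpy.arange(10.0), numpy.arange(10.0), dy=numpy.ones(10))
src.title = "original"

empty = src.clone_without_data(length=len(src.x))
print(empty.title)            # metadata such as the title is carried over
print(empty.y)                # data arrays come back zero-filled, ready to be written
```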
Data1D._validity_check() and _perform_operation():
- The `_validity_check()` docstring is tightened, and a disabled (string-literal) block that used to compare `dxl`/`dxw` values between the two data sets is deleted.
- `_perform_operation()` changes only in whitespace; it still wraps each point in an `Uncertainty(value, variance)` pair and stores `sqrt(|variance|)` of the result as the new `dy`, as sketched below.
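A self-contained sketch of that propagation step, under the assumption that the Uncertainty class (which lives elsewhere in the package) combines independent variances in the usual first-order way; this mirrors the pattern rather than reproducing the package's own class:

```python
# Sketch of the per-point propagation in _perform_operation for addition.
import math

def add_points(y1, dy1, y2, dy2):
    # a = Uncertainty(y1, dy1**2); b = Uncertainty(y2, dy2**2); output = a + b
    value = y1 + y2
    variance = dy1 ** 2 + dy2 ** 2          # assumed independent errors
    # result.dy[i] = math.sqrt(math.fabs(output.variance))
    return value, math.sqrt(math.fabs(variance))

print(add_points(1.0, 0.1, 2.0, 0.2))       # (3.0, ~0.2236)
```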
Data1D._validity_check_union() / _perform_union() and the start of Data2D:
- `_validity_check_union()` loses blank docstring lines and the trailing space after `raise ValueError, msg`.
- `_perform_union()` is whitespace-only; the visible tail still re-indexes every column (for example `result.dxl = result.dxl[ind]`) with a sorted index over the merged x values, as sketched below.
- In `Data2D`, the blank lines between the class-level comments (`Q_unit`, `I_unit`, `x_bins`, `y_bins`) are removed, the commented-out `leny` code in `__str__` is deleted, and `clone_without_data()` gets the same call formatting and assignment re-spacing as the 1D classes.
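Only the tail of `_perform_union()` is visible in this hunk; the sketch below shows the append-and-argsort pattern it appears to rely on (an assumption based on the `result.dxl[ind]` re-indexing), using made-up arrays:

```python
# Sketch of the union step: append both x arrays, then re-order every
# companion column with the indices that sort the merged x values.
import numpy

x1, y1 = numpy.array([0.1, 0.3]), numpy.array([10.0, 30.0])
x2, y2 = numpy.array([0.2, 0.4]), numpy.array([20.0, 40.0])

x = numpy.append(x1, x2)
ind = numpy.argsort(x)          # indices that put the merged x in ascending order
x = x[ind]
y = numpy.append(y1, y2)[ind]
print(x)                        # [ 0.1  0.2  0.3  0.4]
print(y)                        # [ 10.  20.  30.  40.]
```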
1171 1074 1172 1075 :param other: other data set for operation 1173 1174 1076 :return: dy for self, dy for other [numpy arrays] 1175 1176 1077 :raise ValueError: when lengths are not compatible 1177 1178 1078 """ 1179 1079 err_other = None … … 1185 1085 msg = "Unable to perform operation: data length are not equal" 1186 1086 raise ValueError, msg 1187 #if len(self.data) < 1:1188 # msg = "Incompatible data sets: I-values do not match"1189 # raise ValueError, msg1190 1087 for ind in range(len(self.data)): 1191 1088 if self.qx_data[ind] != other.qx_data[ind]: … … 1195 1092 msg = "Incompatible data sets: qy-values do not match" 1196 1093 raise ValueError, msg 1197 1094 1198 1095 # Check that the scales match 1199 1096 err_other = other.err_data … … 1201 1098 (len(other.err_data) != len(other.data)): 1202 1099 err_other = numpy.zeros(len(other.data)) 1203 1100 1204 1101 # Check that we have errors, otherwise create zero vector 1205 1102 err = self.err_data … … 1207 1104 (len(self.err_data) != len(self.data)): 1208 1105 err = numpy.zeros(len(other.data)) 1209 1210 1106 return err, err_other 1211 1107 1212 1108 def _perform_operation(self, other, operation): 1213 1109 """ 1214 1110 Perform 2D operations between data sets 1215 1111 1216 1112 :param other: other data set 1217 1113 :param operation: function defining the operation 1218 1219 1114 """ 1220 1115 # First, check the data compatibility … … 1231 1126 if self.err_data is not None and \ 1232 1127 numpy.size(self.data) == numpy.size(self.err_data): 1233 result.err_data[i] = self.err_data[i] 1128 result.err_data[i] = self.err_data[i] 1234 1129 if self.dqx_data is not None: 1235 1130 result.dqx_data[i] = self.dqx_data[i] … … 1240 1135 result.q_data[i] = self.q_data[i] 1241 1136 result.mask[i] = self.mask[i] 1242 1137 1243 1138 a = Uncertainty(self.data[i], dy[i]**2) 1244 1139 if isinstance(other, Data2D): … … 1249 1144 result.dqx_data[i] += (other.dqx_data[i]**2) 1250 1145 result.dqx_data[i] /= 2 1251 result.dqx_data[i] = math.sqrt(result.dqx_data[i]) 1146 result.dqx_data[i] = math.sqrt(result.dqx_data[i]) 1252 1147 if other.dqy_data is not None and \ 1253 1148 result.dqy_data is not None: … … 1258 1153 else: 1259 1154 b = other 1260 1261 1155 output = operation(a, b) 1262 1156 result.data[i] = output.x 1263 1157 result.err_data[i] = math.sqrt(math.fabs(output.variance)) 1264 1158 return result 1265 1159 1266 1160 def _validity_check_union(self, other): 1267 1161 """ … … 1271 1165 errors vectors if they were present or vectors 1272 1166 of zeros when none was found. 
Data2D._validity_check_union() and _perform_union():
- The docstrings are tightened and trailing whitespace after `raise ValueError, msg` is removed.
- In `_perform_union()`, the spacing around the closing colon of the `dqx_data`/`dqy_data` `if` condition is normalized; the union still appends `data`, `qx_data` and the remaining columns, and only keeps resolution columns when both inputs provide them, as sketched below.
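A simplified sketch of that resolution bookkeeping, abstracting the four-way `None` check in the hunk into a single helper (the helper name and two-argument form are illustrative, not from data_info.py):

```python
# Sketch of the dq handling in Data2D._perform_union: the combined set only
# keeps dqx/dqy columns when both inputs provide them.
import numpy

def union_dq(dq_self, dq_other, n_self, n_other):
    if dq_self is None or dq_other is None:
        return None                       # drop resolution if either side lacks it
    return numpy.zeros(n_self + n_other)  # otherwise allocate for the merged set

print(union_dq(None, numpy.zeros(3), 5, 3))                   # None
print(union_dq(numpy.zeros(5), numpy.zeros(3), 5, 3).shape)   # (8,)
```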