1 | #!/usr/bin/env python |
---|
2 | # -*- coding: utf-8 -*- |
---|
3 | |
---|
4 | import numpy as np |
---|
5 | import pyopencl as cl |
---|
6 | from weights import GaussianDispersion |
---|
7 | |
---|
def set_precision(src, qx, qy, dtype):
    """Prepend an OpenCL typedef header selecting the working precision.

    Casts *qx*/*qy* to contiguous arrays of *dtype* and prefixes *src*
    with a ``#define real ...`` header so the kernel source compiles at
    the matching precision.

    :param src: OpenCL kernel source text.
    :param qx: array-like of qx values.
    :param qy: array-like of qy values.
    :param dtype: numpy dtype string, e.g. ``'float32'`` or ``'double'``.
    :return: (header + src, qx array, qy array)

    Bug fix: the original branch was inverted — ``'double'`` selected the
    float typedef while every other dtype got the fp64 pragma and double
    typedef, so single-precision hosts fed float32 buffers to a double
    kernel.
    """
    qx = np.ascontiguousarray(qx, dtype=dtype)
    qy = np.ascontiguousarray(qy, dtype=dtype)
    if dtype == 'double':
        # fp64 is an OpenCL extension; it must be enabled explicitly.
        header = """\
#pragma OPENCL EXTENSION cl_khr_fp64: enable
#define real double
"""
    else:
        header = """\
#define real float
"""
    return header + src, qx, qy
---|
21 | |
---|
22 | |
---|
class GpuLamellar(object):
    """OpenCL evaluator for a 2-D lamellar scattering model.

    Compiles ``Kernel-Lamellar.cpp`` at the requested precision, uploads
    the q-grid once, then ``eval(pars)`` integrates the kernel over a
    Gaussian polydispersity distribution of the bilayer thickness.
    """

    # Default model parameter values.
    PARS = {
        'scale': 1, 'bi_thick': 1, 'sld_bi': 1e-6, 'sld_sol': 0, 'background': 0,
    }
    # Parameters that support polydispersity.
    PD_PARS = {'bi_thick'}

    def __init__(self, qx, qy, dtype='float32'):
        """Build the OpenCL program and allocate device buffers.

        :param qx: flat array of qx values.
        :param qy: flat array of qy values (same length as qx).
        :param dtype: numpy dtype string for the computation precision.
        """
        # Create context, queue, and build the program.
        self.ctx = cl.create_some_context()
        self.queue = cl.CommandQueue(self.ctx)
        # Use a context manager so the kernel source file is closed
        # promptly (the original leaked the open file handle).
        with open('Kernel-Lamellar.cpp') as kernel_file:
            kernel_src = kernel_file.read()
        src, qx, qy = set_precision(kernel_src, qx, qy, dtype=dtype)
        self.prg = cl.Program(self.ctx, src).build()
        self.qx, self.qy = qx, qy

        # Device buffers: q-grid is read-only input, res is the output.
        mf = cl.mem_flags
        self.qx_b = cl.Buffer(self.ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=self.qx)
        self.qy_b = cl.Buffer(self.ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=self.qy)
        self.res_b = cl.Buffer(self.ctx, mf.WRITE_ONLY, qx.nbytes)
        self.res = np.empty_like(self.qx)

    def eval(self, pars):
        """Evaluate I(qx, qy) for the given parameter dict.

        Required keys: ``scale``, ``bi_thick``, ``sld_bi``, ``sld_sol``,
        ``background``, plus the polydispersity controls
        ``bi_thick_pd_n``, ``bi_thick_pd``, ``bi_thick_pd_nsigma``.

        :return: array of intensities, same length as the q arrays.
        """
        bi_thick = GaussianDispersion(int(pars['bi_thick_pd_n']),
                                      pars['bi_thick_pd'],
                                      pars['bi_thick_pd_nsigma'])
        bi_thick.value, bi_thick.weight = bi_thick.get_weights(pars['bi_thick'], 0, 10000, True)

        # `total` instead of `sum` — the original shadowed the builtin.
        total, norm = 0.0, 0.0
        sub = pars['sld_bi'] - pars['sld_sol']

        # Scalars must be cast to the kernel's precision before launch.
        real = np.float32 if self.qx.dtype == np.dtype('float32') else np.float64
        # `range` replaces the Python-2-only `xrange`.
        for i in range(len(bi_thick.weight)):
            self.prg.LamellarKernel(self.queue, self.qx.shape, None,
                                    self.qx_b, self.qy_b, self.res_b,
                                    real(bi_thick.value[i]),
                                    real(pars['scale']), real(sub),
                                    np.uint32(self.qx.size))
            cl.enqueue_copy(self.queue, self.res, self.res_b)

            # Weighted accumulation over the thickness distribution.
            total += bi_thick.weight[i] * self.res
            norm += bi_thick.weight[i]

        return total / norm + pars['background']
---|
62 | |
---|
def demo():
    """Evaluate the lamellar model on a 128x128 q-grid, time it, save
    the result to ``r.txt``, and display a log-scale color plot."""
    from time import time
    import matplotlib.pyplot as plt

    # Create qx and qy evenly spaced over a symmetric range.
    qx = np.linspace(-.01, .01, 128)
    qy = np.linspace(-.01, .01, 128)
    qx, qy = np.meshgrid(qx, qy)

    # Save the 2-D shape so the flat kernel result can be restored.
    r_shape = qx.shape

    # Flatten for the kernel, which operates on 1-D arrays.
    qx = qx.flatten()
    qy = qy.flatten()

    # The original called undefined `LamellarParameters(...)` and a
    # nonexistent `lamellar_fit` method; use the parameter dict that
    # GpuLamellar.eval actually expects (pd controls map to the old
    # b_n/b_w/sigma arguments).
    pars = {
        'scale': 1, 'bi_thick': 100, 'sld_bi': .291e-6, 'sld_sol': 5.77e-6,
        'background': 0,
        'bi_thick_pd_n': 35, 'bi_thick_pd': .1, 'bi_thick_pd_nsigma': 3,
    }

    t = time()
    model = GpuLamellar(qx, qy)
    result = np.reshape(model.eval(pars), r_shape)
    tt = time()

    print("Time taken: %f" % (tt - t))

    # Write one line per row of the result grid.  The original iterated
    # `xrange(len(r_shape))` (== 2 iterations) and never closed the file.
    with open("r.txt", "w") as f:
        for row in result:
            f.write(str(row))
            f.write("\n")

    plt.pcolormesh(np.log10(result))
    plt.show()
---|
96 | |
---|
97 | |
---|
# Script entry point: run the demonstration when executed directly.
if __name__ == "__main__":
    demo()
---|
100 | |
---|
101 | |
---|
102 | |
---|
103 | |
---|
104 | |
---|
105 | |
---|
106 | |
---|
107 | |
---|
108 | |
---|
109 | |
---|
110 | |
---|
111 | |
---|
112 | |
---|
113 | |
---|
114 | |
---|
115 | |
---|
116 | |
---|
117 | |
---|
118 | |
---|
119 | |
---|
120 | |
---|
121 | |
---|
122 | |
---|
123 | |
---|
124 | |
---|
125 | |
---|
126 | |
---|
127 | |
---|
128 | |
---|
129 | |
---|
130 | |
---|
131 | |
---|