source: Sophya/trunk/SophyaLib/Samba/lambdaBuilder.cc@ 738

Last change on this file since 738 was 729, checked in by ansari, 26 years ago

addition des Ylm etc.

File size: 5.6 KB
Line 
#include "lambdaBuilder.h"
#include "nbconst.h"

#include <cmath>
3
4
5Legendre::Legendre(r_8 x, int_4 lmax)
6{
7 if (abs(x) >1 )
8 {
9 throw RangeCheckError("variable for Legendre polynomials must have modules inferior to 1" );
10 }
11 x_ = x;
12 array_init(lmax);
13}
14void Legendre::array_init(int_4 lmax)
15{
16 lmax_ = lmax;
17 Pl_.ReSize(lmax_+1);
18 Pl_(0)=1.;
19 Pl_(1)=x_;
20 for (int k=2; k<Pl_.NElts(); k++)
21 {
22 Pl_(k) = ( (2.*k-1)*x_*Pl_(k-1)-(k-1)*Pl_(k-2) )/k;
23 }
24}
25
// Class-wide (shared) tables, lazily allocated on first use and resized
// upward when a builder is instantiated with a larger lmax:
//  - a_recurrence_ : coefficients of the lambda_lm three-term recursion
//  - lam_fact_     : l,m-dependent factors used by the WX and PM builders
//  - normal_l_     : l-dependent normalization used by the WX and PM builders
TriangularMatrix<r_8>* LambdaLMBuilder::a_recurrence_ = NULL;
TriangularMatrix<r_8>* LambdaLMBuilder::lam_fact_ = NULL;
TVector<r_8>* LambdaLMBuilder::normal_l_ = NULL;
29
30LambdaLMBuilder::LambdaLMBuilder(r_8 theta,int_4 lmax, int_4 mmax)
31 {
32 cth_=cos(theta);
33 sth_=sin(theta);
34 array_init(lmax, mmax);
35 }
// Fills lambda_(l,m) with the normalized associated Legendre functions
// lambda_lm(cos theta) for 0 <= m <= mmax, m <= l <= lmax, using a
// three-term recursion in l at fixed m.
// NOTE(review): intermediate values are rescaled by bignorm2 to delay
// floating underflow of lam_mm (which decays like sin(theta)^m at large m)
// -- this looks adapted from the HEALPix synthesis code; confirm.
void LambdaLMBuilder::array_init(int lmax, int mmax)
  {
    // Lazily build the shared recurrence-coefficient table; enlarge it
    // (with a performance warning) if a bigger lmax is requested later.
    if (a_recurrence_ == NULL)
      {
        a_recurrence_ = new TriangularMatrix<r_8>;
        updateArrayRecurrence(lmax);
      }
    else
      if ( lmax > (*a_recurrence_).rowNumber()-1 )
        {
          cout << " WARNING : The classes LambdaXXBuilder will be more efficient if instanciated with parameter lmax = maximum value of l index which will be needed in the whole application (arrays not recomputed) " << endl;
          cout << "lmax= " << lmax << " previous instanciation with lmax= " << (*a_recurrence_).rowNumber() << endl;
          updateArrayRecurrence(lmax);
        }
    lmax_=lmax;
    mmax_=mmax;
    // underflow-guard scale factor applied to lam_mm
    r_8 bignorm2 = 1.e268; // = 1e-20*1.d288

    lambda_.ReSizeRow(lmax_+1);

    // diagonal start: lambda_00 = 1/sqrt(4 pi), kept pre-multiplied by bignorm2
    r_8 lam_mm = 1. / sqrt(4.*Pi) *bignorm2;

    for (int m=0; m<=mmax_;m++)
      {

        // diagonal term: divide the scale factor back out
        lambda_(m,m)= lam_mm / bignorm2;

        // recursion in l on values expressed in units of lam_mm:
        //   lam_2 = a(l,m) * ( cos(theta)*lam_1 - b*lam_0 ),  b = 1/a(l-1,m)
        r_8 lam_0=0.;
        r_8 lam_1=1. /bignorm2 ;
        // r_8 a_rec = LWK->a_recurr(m,m);
        r_8 a_rec = (*a_recurrence_)(m,m);
        r_8 b_rec = 0.;
        for (int l=m+1; l<=lmax_; l++)
          {
            r_8 lam_2 = (cth_*lam_1-b_rec*lam_0)*a_rec;
            lambda_(l,m) = lam_2*lam_mm;  // restore the true magnitude
            b_rec=1./a_rec;
            // a_rec= LWK->a_recurr(l,m);
            a_rec= (*a_recurrence_)(l,m);
            lam_0 = lam_1;
            lam_1 = lam_2;
          }

        // advance the diagonal:
        //   lam_{m+1,m+1} = -lam_mm * sin(theta) * sqrt((2m+3)/(2m+2))
        lam_mm = -lam_mm*sth_* sqrt( (2.*m+3.)/ (2.*m+2.) );

      }
  }
84
85
86void LambdaLMBuilder::updateArrayRecurrence(int_4 lmax)
87 {
88 (*a_recurrence_).ReSizeRow(lmax+1);
89 for (int m=0; m<=lmax;m++)
90 {
91 (*a_recurrence_)(m,m) = sqrt( 2.*m +3.);
92 for (int l=m+1; l<=lmax; l++)
93 {
94 r_8 fl2 = (l+1.)*(l+1.);
95 (*a_recurrence_)(l,m)=sqrt( (4.*fl2-1.)/(fl2-m*m) );
96 }
97 }
98 }
99
100
101void LambdaLMBuilder::updateArrayLamNorm()
102 {
103 (*lam_fact_).ReSizeRow(lmax_+1);
104 for(int m = 0;m<= lmax_; m++)
105 {
106 for (int l=m; l<=lmax_; l++)
107 {
108 (*lam_fact_)(l,m) =2.*(r_8)sqrt( (2.*l+1)*(l+m)*(l-m)/(2.*l-1) );
109 }
110 }
111 (*normal_l_).ReSize(lmax_+1);
112 (*normal_l_)(0)=0.;
113 (*normal_l_)(1)=0.;
114 for (int l=2; l< (*normal_l_).NElts(); l++)
115 {
116 (*normal_l_)(l) =(r_8)sqrt( 2./( (l+2)*(l+1)*l*(l-1) ) );
117 }
118 }
119
120
121
122
// Builds the spin-2 W/X lambda tables at colatitude theta; the base class
// first computes the scalar lambda_lm values this derivation needs.
LambdaWXBuilder::LambdaWXBuilder(r_8 theta, int_4 lmax, int_4 mmax) : LambdaLMBuilder(theta, lmax, mmax)
  {
    array_init();
  }
127
128
// Fills lamWlm_ / lamXlm_ with the spin-2 "W" and "X" functions derived
// from the scalar lambda_lm values of the base class.
// NOTE(review): divides by sin^2(theta) with no guard -- callers must not
// instantiate at the poles (theta = 0 or pi); confirm with call sites.
void LambdaWXBuilder::array_init()
  {
    // Lazily build the shared lam_fact_/normal_l_ tables (also used by
    // LambdaPMBuilder), enlarging them if lmax_ exceeds their current size.
    if (lam_fact_ == NULL || normal_l_ == NULL)
      {
        lam_fact_ = new TriangularMatrix<r_8>;
        normal_l_ = new TVector<r_8>;
        updateArrayLamNorm();
      }
    else
      if ( lmax_ > (*lam_fact_).rowNumber()-1 || lmax_ > (*normal_l_).NElts()-1 )
        {
          updateArrayLamNorm();
        }

    r_8 one_on_s2 = 1. / (sth_*sth_) ; // 1/sin^2
    r_8 c_on_s2 = cth_*one_on_s2;      // cos/sin^2
    lamWlm_.ReSizeRow(lmax_+1);
    lamXlm_.ReSizeRow(lmax_+1);

    // calcul des lambda
    // zero the triangle first (W and X vanish for l < 2)
    for(int m = 0;m<= mmax_; m++)
      {
        for (int l=m; l<=lmax_; l++)
          {
            lamWlm_(l,m) = 0.;
            lamXlm_(l,m) = 0.;
          }
      }
    // W_lm =  N_l [ (2(l-m^2)/s^2 + l(l-1)) lam_lm - (c/s^2) f_lm lam_{l-1,m} ]
    // X_lm = -N_l (m/s^2) [ 2c(l-1) lam_lm - f_lm lam_{l-1,m} ]
    // with N_l = normal_l_(l), f_lm = lam_fact_(l,m).
    for(int l = 2;l<= lmax_; l++)
      {
        r_8 normal_l = (*normal_l_)(l);
        // NOTE(review): m runs to l here, not mmax_, unlike the zeroing loop
        for (int m=0; m<=l; m++)
          {
            r_8 lam_lm1m = LambdaLMBuilder::lamlm(l-1,m);
            r_8 lam_lm = LambdaLMBuilder::lamlm(l,m);
            r_8 lam_fact_l_m = (*lam_fact_)(l,m);
            r_8 a_w = 2. * (l - m*m) * one_on_s2 + l*(l-1.);
            r_8 b_w = c_on_s2 * lam_fact_l_m;
            r_8 a_x = 2. * cth_ * (l-1.);
            lamWlm_(l,m) = normal_l * ( a_w * lam_lm - b_w * lam_lm1m );
            lamXlm_(l,m) = - normal_l * m* one_on_s2* ( a_x * lam_lm - lam_fact_l_m * lam_lm1m );
          }
      }

  }
174
175
// Builds the spin-2 "+"/"-" polarization lambda tables at colatitude theta;
// the base class first computes the scalar lambda_lm values used here.
LambdaPMBuilder::LambdaPMBuilder(r_8 theta, int_4 lmax, int_4 mmax) : LambdaLMBuilder(theta, lmax, mmax)
  {
    array_init();
  }
180
181
// Fills lamPlm_ / lamMlm_ with the spin-2 "+" and "-" polarization
// functions derived from the scalar lambda_lm values of the base class.
// NOTE(review): divides by sin^2(theta) with no pole guard, like the WX
// builder; Rac2 is presumably sqrt(2) from nbconst.h -- confirm.
void LambdaPMBuilder::array_init()
  {
    // Lazily build the shared lam_fact_/normal_l_ tables (same tables as
    // LambdaWXBuilder), enlarging them if lmax_ exceeds their current size.
    if (lam_fact_ == NULL || normal_l_ == NULL)
      {
        lam_fact_ = new TriangularMatrix<r_8>;
        normal_l_ = new TVector<r_8>;
        updateArrayLamNorm();
      }
    else
      if ( lmax_ > (*lam_fact_).rowNumber()-1 || lmax_ > (*normal_l_).NElts()-1 )
        {
          updateArrayLamNorm();
        }

    r_8 one_on_s2 = 1. / (sth_*sth_) ;  // 1/sin^2
    r_8 c_on_s2 = cth_*one_on_s2;       // cos/sin^2
    lamPlm_.ReSizeRow(lmax_+1);
    lamMlm_.ReSizeRow(lmax_+1);

    // calcul des lambda
    // zero the triangle first (the "+"/"-" functions vanish for l < 2)
    for(int m = 0;m<= mmax_; m++)
      {
        for (int l=m; l<=lmax_; l++)
          {
            lamPlm_(l,m) = 0.;
            lamMlm_(l,m) = 0.;
          }
      }

    // P_lm = N_l [ -(a+c) lam_lm + f (cos+m)/s^2 lam_{l-1,m} ] / Rac2
    // M_lm = N_l [ -(a-c) lam_lm + f (cos-m)/s^2 lam_{l-1,m} ] / Rac2
    // with a = 2(l-m^2)/s^2 + l(l-1), c = 2m(l-1) cos/s^2,
    // N_l = normal_l_(l), f = lam_fact_(l,m).
    for(int l = 2;l<= lmax_; l++)
      {
        r_8 normal_l = (*normal_l_)(l);
        // NOTE(review): m runs to l here, not mmax_, unlike the zeroing loop
        for (int m=0; m<=l; m++)
          {
            r_8 lam_lm1m = LambdaLMBuilder::lamlm(l-1,m);
            r_8 lam_lm = LambdaLMBuilder::lamlm(l,m);
            r_8 lam_fact_l_m = (*lam_fact_)(l,m);
            r_8 a_w = 2. * (l - m*m) * one_on_s2 + l*(l-1.);
            r_8 f_w = lam_fact_l_m/(sth_*sth_);
            r_8 c_w = 2*m*(l-1.) * c_on_s2;

            lamPlm_(l,m) = normal_l * ( -(a_w+c_w) * lam_lm + f_w*( cth_ + m) * lam_lm1m )/Rac2;
            lamMlm_(l,m) = normal_l * ( -(a_w-c_w) * lam_lm + f_w*( cth_ - m) * lam_lm1m )/Rac2;
          }
      }

  }
229
230
Note: See TracBrowser for help on using the repository browser.