#include "lambdaBuilder.h" #include "nbconst.h" Legendre::Legendre(r_8 x, int_4 lmax) { if (abs(x) >1 ) { throw RangeCheckError("variable for Legendre polynomials must have modules inferior to 1" ); } x_ = x; array_init(lmax); } void Legendre::array_init(int_4 lmax) { lmax_ = lmax; Pl_.ReSize(lmax_+1); Pl_(0)=1.; Pl_(1)=x_; for (int k=2; k* LambdaLMBuilder::a_recurrence_ = NULL; TriangularMatrix* LambdaLMBuilder::lam_fact_ = NULL; TVector* LambdaLMBuilder::normal_l_ = NULL; LambdaLMBuilder::LambdaLMBuilder(r_8 theta,int_4 lmax, int_4 mmax) { cth_=cos(theta); sth_=sin(theta); array_init(lmax, mmax); } void LambdaLMBuilder::array_init(int lmax, int mmax) { if (a_recurrence_ == NULL) { a_recurrence_ = new TriangularMatrix; updateArrayRecurrence(lmax); } else if ( lmax > (*a_recurrence_).rowNumber()-1 ) { cout << " WARNING : The classes LambdaXXBuilder will be more efficient if instanciated with parameter lmax = maximum value of l index which will be needed in the whole application (arrays not recomputed) " << endl; cout << "lmax= " << lmax << " previous instanciation with lmax= " << (*a_recurrence_).rowNumber() << endl; updateArrayRecurrence(lmax); } lmax_=lmax; mmax_=mmax; r_8 bignorm2 = 1.e268; // = 1e-20*1.d288 lambda_.ReSizeRow(lmax_+1); r_8 lam_mm = 1. / sqrt(4.*Pi) *bignorm2; for (int m=0; m<=mmax_;m++) { lambda_(m,m)= lam_mm / bignorm2; r_8 lam_0=0.; r_8 lam_1=1. /bignorm2 ; // r_8 a_rec = LWK->a_recurr(m,m); r_8 a_rec = (*a_recurrence_)(m,m); r_8 b_rec = 0.; for (int l=m+1; l<=lmax_; l++) { r_8 lam_2 = (cth_*lam_1-b_rec*lam_0)*a_rec; lambda_(l,m) = lam_2*lam_mm; b_rec=1./a_rec; // a_rec= LWK->a_recurr(l,m); a_rec= (*a_recurrence_)(l,m); lam_0 = lam_1; lam_1 = lam_2; } lam_mm = -lam_mm*sth_* sqrt( (2.*m+3.)/ (2.*m+2.) ); } } void LambdaLMBuilder::updateArrayRecurrence(int_4 lmax) { (*a_recurrence_).ReSizeRow(lmax+1); for (int m=0; m<=lmax;m++) { (*a_recurrence_)(m,m) = sqrt( 2.*m +3.); for (int l=m+1; l<=lmax; l++) { r_8 fl2 = (l+1.)*(l+1.); (*a_recurrence_)(l,m)=sqrt( (4.*fl2-1.)/(fl2-m*m) ); } } } void LambdaLMBuilder::updateArrayLamNorm() { (*lam_fact_).ReSizeRow(lmax_+1); for(int m = 0;m<= lmax_; m++) { for (int l=m; l<=lmax_; l++) { (*lam_fact_)(l,m) =2.*(r_8)sqrt( (2.*l+1)*(l+m)*(l-m)/(2.*l-1) ); } } (*normal_l_).ReSize(lmax_+1); (*normal_l_)(0)=0.; (*normal_l_)(1)=0.; for (int l=2; l< (*normal_l_).NElts(); l++) { (*normal_l_)(l) =(r_8)sqrt( 2./( (l+2)*(l+1)*l*(l-1) ) ); } } LambdaWXBuilder::LambdaWXBuilder(r_8 theta, int_4 lmax, int_4 mmax) : LambdaLMBuilder(theta, lmax, mmax) { array_init(); } void LambdaWXBuilder::array_init() { if (lam_fact_ == NULL || normal_l_ == NULL) { lam_fact_ = new TriangularMatrix; normal_l_ = new TVector; updateArrayLamNorm(); } else if ( lmax_ > (*lam_fact_).rowNumber()-1 || lmax_ > (*normal_l_).NElts()-1 ) { updateArrayLamNorm(); } r_8 one_on_s2 = 1. / (sth_*sth_) ; // 1/sin^2 r_8 c_on_s2 = cth_*one_on_s2; lamWlm_.ReSizeRow(lmax_+1); lamXlm_.ReSizeRow(lmax_+1); // calcul des lambda for(int m = 0;m<= mmax_; m++) { for (int l=m; l<=lmax_; l++) { lamWlm_(l,m) = 0.; lamXlm_(l,m) = 0.; } } for(int l = 2;l<= lmax_; l++) { r_8 normal_l = (*normal_l_)(l); for (int m=0; m<=l; m++) { r_8 lam_lm1m = LambdaLMBuilder::lamlm(l-1,m); r_8 lam_lm = LambdaLMBuilder::lamlm(l,m); r_8 lam_fact_l_m = (*lam_fact_)(l,m); r_8 a_w = 2. * (l - m*m) * one_on_s2 + l*(l-1.); r_8 b_w = c_on_s2 * lam_fact_l_m; r_8 a_x = 2. 
* cth_ * (l-1.); lamWlm_(l,m) = normal_l * ( a_w * lam_lm - b_w * lam_lm1m ); lamXlm_(l,m) = - normal_l * m* one_on_s2* ( a_x * lam_lm - lam_fact_l_m * lam_lm1m ); } } } LambdaPMBuilder::LambdaPMBuilder(r_8 theta, int_4 lmax, int_4 mmax) : LambdaLMBuilder(theta, lmax, mmax) { array_init(); } void LambdaPMBuilder::array_init() { if (lam_fact_ == NULL || normal_l_ == NULL) { lam_fact_ = new TriangularMatrix; normal_l_ = new TVector; updateArrayLamNorm(); } else if ( lmax_ > (*lam_fact_).rowNumber()-1 || lmax_ > (*normal_l_).NElts()-1 ) { updateArrayLamNorm(); } r_8 one_on_s2 = 1. / (sth_*sth_) ; r_8 c_on_s2 = cth_*one_on_s2; lamPlm_.ReSizeRow(lmax_+1); lamMlm_.ReSizeRow(lmax_+1); // calcul des lambda for(int m = 0;m<= mmax_; m++) { for (int l=m; l<=lmax_; l++) { lamPlm_(l,m) = 0.; lamMlm_(l,m) = 0.; } } for(int l = 2;l<= lmax_; l++) { r_8 normal_l = (*normal_l_)(l); for (int m=0; m<=l; m++) { r_8 lam_lm1m = LambdaLMBuilder::lamlm(l-1,m); r_8 lam_lm = LambdaLMBuilder::lamlm(l,m); r_8 lam_fact_l_m = (*lam_fact_)(l,m); r_8 a_w = 2. * (l - m*m) * one_on_s2 + l*(l-1.); r_8 f_w = lam_fact_l_m/(sth_*sth_); r_8 c_w = 2*m*(l-1.) * c_on_s2; lamPlm_(l,m) = normal_l * ( -(a_w+c_w) * lam_lm + f_w*( cth_ + m) * lam_lm1m )/Rac2; lamMlm_(l,m) = normal_l * ( -(a_w-c_w) * lam_lm + f_w*( cth_ - m) * lam_lm1m )/Rac2; } } }
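
//---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the original library code).
// It assumes lamlm(l,m) is an accessible accessor of LambdaLMBuilder
// returning lambda_(l,m), as suggested by its use in the builders above,
// and uses arbitrary test values for theta, lmax and mmax.
//
//   #include "lambdaBuilder.h"
//
//   int main()
//   {
//     int_4 lmax = 64, mmax = 64;          // assumed test values
//     r_8   theta = 0.5;                   // colatitude in radians
//
//     // scalar lambda_lm(theta) plus the polarization W/X functions
//     LambdaWXBuilder wx(theta, lmax, mmax);
//     r_8 l20 = wx.lamlm(2, 0);            // hypothetical accessor call
//
//     // spin +/- combinations at the same colatitude
//     LambdaPMBuilder pm(theta, lmax, mmax);
//
//     return 0;
//   }
//---------------------------------------------------------------------------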