Rivet analyses reference

ALEPH_1996_I415745
Polarization of $\Lambda^0$ baryons at LEP 1

Experiment: ALEPH (LEP)
Inspire ID: 415745
Status: VALIDATED
Authors:
Beam energies: (45.6, 45.6) GeV
Run details:
Measurement of the polarization of $\Lambda^0$ baryons at LEP 1. The $\cos\theta$ and $\cos\phi$ distributions are extracted, and the polarization is then obtained by fitting the resulting distributions.

Source code: ALEPH_1996_I415745.cc

// -*- C++ -*-
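//
// Sketch of the extraction strategy as implemented below (this note summarises
// the code, not the paper text): in the Lambda rest frame the decay proton is
// distributed as
//   dN/dcos(theta*) ~ 1 + alpha_Lambda * P * cos(theta*),
// so calcAlpha() and calcAsymmetry() perform weighted least-squares fits of the
// slope of the binned cos(theta) and cos(phi) distributions, and finalize()
// divides the fitted slopes by alpha_Lambda (taken as 0.642 below) to obtain
// the longitudinal and transverse polarizations.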
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/UnstableParticles.hh"
#include "Rivet/Projections/Thrust.hh"
#include "Rivet/Projections/ChargedFinalState.hh"

namespace Rivet {


  /// @brief Lambda polarization at LEP1
  class ALEPH_1996_I415745 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(ALEPH_1996_I415745);


    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(Beam(), "Beams");
      const ChargedFinalState cfs;
      const Thrust thrust(cfs);
      declare(thrust, "Thrust");
      declare(UnstableParticles(), "UFS");

      // Book histograms
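      // (Each group is binned in an outer variable: z = p_Lambda/p_beam for the
      //  longitudinal polarization and the production asymmetry, and the Lambda pT
      //  with respect to the thrust axis for the transverse polarization. Every
      //  outer bin holds an angular distribution that is fitted in finalize().)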
      book(_h_ctheta, {0.1, 0.15, 0.2, 0.3, 0.4, 1.});
      for (size_t i = 0; i < _h_ctheta->numBins(); ++i) {
        book(_h_ctheta->bin(i+1), "/TMP/ctheta_"+to_string(i), 20, -1.0, 1.0);
      }
      book(_h_ctheta_large, "/TMP/ctheta_large", 20, -1., 1.);

      book(_h_plus_cphi,  {0.3, 0.6, 0.9, 1.2, 1.5});
      book(_h_minus_cphi, {0.3, 0.6, 0.9, 1.2, 1.5});
      for (size_t i = 0; i < _h_plus_cphi->numBins(); ++i) {
        book(_h_plus_cphi->bin(i+1),  "/TMP/cphiP_0_"+to_string(i), 10, 0., 1.);
        book(_h_minus_cphi->bin(i+1), "/TMP/cphiM_0_"+to_string(i), 10, 0., 1.);
      }
      book(_h_plus_cphi_low,   "/TMP/cphiP_low",  10, 0., 1.);
      book(_h_plus_cphi_mid,   "/TMP/cphiP_mid",  10, 0., 1.);
      book(_h_plus_cphi_high,  "/TMP/cphiP_high", 10, 0., 1.);
      book(_h_minus_cphi_low,  "/TMP/cphiM_low",  10, 0., 1.);
      book(_h_minus_cphi_mid,  "/TMP/cphiM_mid",  10, 0., 1.);
      book(_h_minus_cphi_high, "/TMP/cphiM_high", 10, 0., 1.);

      book(_h_plus_lam,  {0.1, 0.15, 0.2, 0.3, 0.4, 1.});
      book(_h_minus_lam, {0.1, 0.15, 0.2, 0.3, 0.4, 1.});
      for (size_t i = 0; i < _h_plus_lam->numBins(); ++i) {
        book(_h_plus_lam->bin(i+1),  "/TMP/lamP_0_"+to_string(i), 20, -1., 1.);
        book(_h_minus_lam->bin(i+1), "/TMP/lamM_0_"+to_string(i), 20, -1., 1.);
      }
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Get beams and average beam momentum
      const ParticlePair& beams = apply<Beam>(event, "Beams").beams();
      const double meanBeamMom = ( beams.first.p3().mod() +
                                   beams.second.p3().mod() ) / 2.0;
      Vector3 beamAxis;
      if (beams.first.pid() == -11) {
        beamAxis = beams.first.momentum().p3().unit();
      }
      else {
        beamAxis = beams.second.momentum().p3().unit();
      }

      MSG_DEBUG("Avg beam momentum = " << meanBeamMom);
      // thrust, to define an axis
      const Thrust& thrust = apply<Thrust>(event, "Thrust");
      const UnstableParticles& ufs = apply<UnstableParticles>(event, "UFS");

      for (const Particle& lambda : ufs.particles(Cuts::abspid==3122)) {
        double z = lambda.momentum().p3().mod()/meanBeamMom;
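        // sign is +1 for a Lambda and -1 for a Lambdabar: it selects the
        // p pi- (pbar pi+) decay products and separates the two samples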
        int sign = lambda.pid()/3122;
        Vector3 axis1 = lambda.momentum().p3().unit();
        // asymmetry
        double cLam = axis1.dot(beamAxis);
        if (sign > 0)
          _h_plus_lam->fill(z, cLam);
        else
          _h_minus_lam->fill(z, cLam);
        if (lambda.children().size() != 2) continue;
        // look at the decay products
        Particle proton, pion;
        if (lambda.children()[0].pid() ==  sign*2212 &&
            lambda.children()[1].pid() == -sign*211) {
          proton = lambda.children()[0];
          pion   = lambda.children()[1];
        }
        else if (lambda.children()[1].pid() ==  sign*2212 &&
                 lambda.children()[0].pid() == -sign*211) {
          proton = lambda.children()[1];
          pion   = lambda.children()[0];
        }
        else
          continue;
        // boost to the lambda rest frame
        LorentzTransform boost = LorentzTransform::mkFrameTransformFromBeta(lambda.momentum().betaVec());
        FourMomentum pproton = boost.transform(proton.momentum());
        // longitudinal polarization
        double ctheta = axis1.dot(pproton.p3().unit());
        _h_ctheta->fill(z, ctheta);
        if (z >= 0.3) _h_ctheta_large->fill(ctheta);

        // transverse polarization
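        // (The transverse axis is the normal to the plane spanned by the thrust
        //  axis, oriented into the Lambda hemisphere, and the Lambda direction.)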
        if (z > 0.15) {
          Vector3 axis2;
          if (lambda.momentum().p3().dot(thrust.thrustAxis()) >= 0.) {
            axis2 = thrust.thrustAxis();
          }
          else {
            axis2 = -thrust.thrustAxis();
          }
          Vector3 axis3 = axis2.cross(axis1).unit();

          double pT = sqrt(sqr(thrust.thrustMajorAxis().dot(lambda.momentum().p3())) +
                           sqr(thrust.thrustMinorAxis().dot(lambda.momentum().p3())));
          double cPhi = axis3.dot(pproton.p3().unit());
          if (cPhi > 0.) {
            _h_plus_cphi->fill(pT, cPhi);
            if (pT > 0.3)
              _h_plus_cphi_low->fill(cPhi);
            if (pT > 0.6)
              _h_plus_cphi_mid->fill(cPhi);
            if (pT > 1.5)
              _h_plus_cphi_high->fill(cPhi);
          }
          else {
            _h_minus_cphi->fill(pT, abs(cPhi));
            if (pT > 0.3)
              _h_minus_cphi_low->fill(abs(cPhi));
            if (pT > 0.6)
              _h_minus_cphi_mid->fill(abs(cPhi));
            if (pT > 1.5)
              _h_minus_cphi_high->fill(abs(cPhi));
          }
        }
      }
    }

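    /// Extract the slope s of a normalised angular distribution dN/dx = (1 + s*x)/2
    /// on [-1,1] by a weighted least-squares fit: the expected content of a bin
    /// [x1,x2] is a_i + b_i*s with a_i = (x2-x1)/2 and b_i = a_i*(x1+x2)/2.
    /// Returns the best-fit slope and its statistical uncertainty.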
    pair<double,double> calcAlpha(Histo1DPtr hist) {
      if (hist->numEntries() == 0.) return make_pair(0., 0.);
      double sum1(0.), sum2(0.);
      for (const auto& bin : hist->bins()) {
        double Oi = bin.sumW();
        if (Oi == 0.) continue;
        double ai = 0.5*(bin.xMax() - bin.xMin());
        double bi = 0.5*ai*(bin.xMax() + bin.xMin());
        double Ei = bin.errW();
        sum1 += sqr(bi/Ei);
        sum2 += bi/sqr(Ei)*(Oi - ai);
      }
      return make_pair(sum2/sum1, sqrt(1./sum1));
    }

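    /// Weighted least-squares extraction of a single coefficient from a binned
    /// asymmetry estimate, analogous to calcAlpha(): the bin values are fitted to
    /// b_i*s, where the coefficients b_i differ between the two modes (mode 0 is
    /// used for the transverse-polarization fits, mode 1 for the Lambda asymmetry).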
    pair<double,double> calcAsymmetry(Estimate1DPtr hist, unsigned int mode) {
      double sum1(0.), sum2(0.);
      for (const auto& bin : hist->bins()) {
        double Oi = bin.val();
        if (Oi == 0.) continue;
        double bi;
        if (mode == 0)
          bi = 0.25*(bin.xMax() - bin.xMin())*(bin.xMax() + bin.xMin());
        else
          bi = 4.*(bin.xMax() + bin.xMin())/(3. + sqr(bin.xMax()) + bin.xMax()*bin.xMin() + sqr(bin.xMin()));
        double Ei = bin.errAvg();
        sum1 += sqr(bi/Ei);
        sum2 += bi/sqr(Ei)*Oi;
      }
      return make_pair(sum2/sum1, sqrt(1./sum1));
    }

    /// Normalise histograms etc., after the run
    void finalize() {
      // longitudinal polarization
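      // aLam = 0.642 is the Lambda -> p pi- decay asymmetry parameter alpha_Lambda;
      // the fitted slopes are divided by it to convert them into polarizations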
      unsigned int ipoint = 0;
      double aLam = 0.642;
      Estimate1DPtr h_long;
      book(h_long, 1, 1, 1);
      for (auto& hist : _h_ctheta->bins()) {
        normalize(hist);
        pair<double,double> alpha = calcAlpha(hist);
        alpha.first  /= aLam;
        alpha.second /= aLam;
        h_long->bin(ipoint+1).set(alpha.first, alpha.second);
        ++ipoint;
      }
      normalize(_h_ctheta_large);
      pair<double,double> alpha = calcAlpha(_h_ctheta_large);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_long_l;
      book(h_long_l, 1, 2, 1);
      h_long_l->bin(1).set(alpha.first, alpha.second);
      // transverse polarization
      Estimate1DPtr h_trans;
      book(h_trans, 2, 1, 1);
      for (size_t ix=0; ix < _h_plus_cphi->numBins(); ++ix) {
        normalize(_h_plus_cphi->bin(ix+1));
        normalize(_h_minus_cphi->bin(ix+1));
        Estimate1DPtr sTemp;
        book(sTemp, "/TMP/a_cphi_"+to_string(ix), 10, 0., 1.);
        asymm(_h_plus_cphi->bin(ix+1), _h_minus_cphi->bin(ix+1), sTemp);
        pair<double,double> alpha = calcAsymmetry(sTemp, 0);
        alpha.first  /= aLam;
        alpha.second /= aLam;
        h_trans->bin(ix+1).set(alpha.first, alpha.second);
      }
      Estimate1DPtr sLow;
      book(sLow, "/TMP/a_cphi_low", 10, 0., 1.);
      asymm(_h_plus_cphi_low, _h_minus_cphi_low, sLow);
      alpha = calcAsymmetry(sLow, 0);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_low;
      book(h_trans_low, 2, 3, 1);
      h_trans_low->bin(1).set(alpha.first, alpha.second);

      Estimate1DPtr sMid;
      book(sMid, "/TMP/a_cphi_mid", 10, 0., 1.);
      asymm(_h_plus_cphi_mid, _h_minus_cphi_mid, sMid);
      alpha = calcAsymmetry(sMid, 0);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_mid;
      book(h_trans_mid, 2, 4, 1);
      h_trans_mid->bin(1).set(alpha.first, alpha.second);

      Estimate1DPtr sHigh;
      book(sHigh, "/TMP/a_cphi_high", 10, 0., 1.);
      asymm(_h_plus_cphi_high, _h_minus_cphi_high, sHigh);
      alpha = calcAsymmetry(sHigh, 0);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_high;
      book(h_trans_high, 2, 2, 1);
      h_trans_high->bin(1).set(alpha.first, alpha.second);

      // asymmetry
      Estimate1DPtr h_asym;
      book(h_asym, 3, 1, 1);
      for (size_t ix=0; ix < _h_plus_lam->numBins(); ++ix) {
        normalize(_h_plus_lam->bin(ix+1));
        normalize(_h_minus_lam->bin(ix+1));
        Estimate1DPtr sTemp;
        book(sTemp, "/TMP/a_lam_"+to_string(ix), 20, -1., 1.);
        asymm(_h_plus_lam->bin(ix+1), _h_minus_lam->bin(ix+1), sTemp);
        pair<double,double> alpha = calcAsymmetry(sTemp, 1);
        h_asym->bin(ix+1).set(alpha.first, alpha.second);
      }
    }

    /// @}


    /// @name Histograms
    /// @{
    Histo1DGroupPtr _h_ctheta, _h_plus_cphi, _h_minus_cphi, _h_plus_lam, _h_minus_lam;
    Histo1DPtr _h_ctheta_large;
    Histo1DPtr _h_minus_cphi_low, _h_minus_cphi_mid, _h_minus_cphi_high;
    Histo1DPtr _h_plus_cphi_low, _h_plus_cphi_mid, _h_plus_cphi_high;
    /// @}


  };


  RIVET_DECLARE_PLUGIN(ALEPH_1996_I415745);


}