Rivet analyses reference

OPAL_1997_I447188

Polarization of $\Lambda^0$ baryons at LEP 1

Experiment: OPAL (LEP)
Inspire ID: 447188
Status: VALIDATED
Authors:
Beam energies: (45.6, 45.6) GeV
Run details:
Measurement of the polarization of $\Lambda^0$ baryons at LEP 1. The $\cos\theta$ and $\cos\phi$ distributions are extracted, and the polarization is then obtained by fitting the resulting distributions.
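A note on the fit form (not part of the original description; $P$, $\theta^*$ and the bin quantities below are our labels): the calcAlpha() helper in the source code below performs a weighted least-squares fit of each normalized decay-angle distribution to the expected form

$$ \frac{1}{N}\frac{\mathrm{d}N}{\mathrm{d}\cos\theta^*} \;=\; \frac{1}{2}\left(1 + \alpha_\Lambda P \cos\theta^*\right), \qquad \alpha_\Lambda = 0.642 , $$

i.e. each bin $[x_{\min},x_{\max}]$ is expected to contain $a_i + b_i\,\alpha_\Lambda P$ with $a_i = (x_{\max}-x_{\min})/2$ and $b_i = (x_{\max}^2-x_{\min}^2)/4$. The fitted slope $\alpha_\Lambda P$ is divided by $\alpha_\Lambda$ and scaled by 100 in finalize() to quote the polarization in percent. The quantization axis is the $\Lambda^0$ flight direction for the longitudinal ($\cos\theta$) case, and the normal to the plane spanned by the thrust axis and the $\Lambda^0$ direction for the transverse ($\cos\phi$) case.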
Source code: OPAL_1997_I447188.cc

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/UnstableParticles.hh"
#include "Rivet/Projections/Thrust.hh"
#include "Rivet/Projections/ChargedFinalState.hh"

namespace Rivet {

  /// @brief Lambda polarization at LEP1
  class OPAL_1997_I447188 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(OPAL_1997_I447188);


    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(Beam(), "Beams");
      const ChargedFinalState cfs;
      const Thrust thrust(cfs);
      declare(thrust, "Thrust");
      declare(UnstableParticles(), "UFS");

      // Book the histograms: bins with published reference data are booked
      // from the HepData tables, the remainder are temporary /TMP histograms
      // used only for the fits in finalize()
      const vector<double> edges{0.027, 0.05, 0.08, 0.09, 0.1, 0.15, 0.2, 0.3, 0.4, 1.0};
      book(_h_ctheta, edges);
      size_t iy = 1;
      for (auto& b : _h_ctheta->bins()) {
        if (b.index() == 2 || b.index() == 5 || b.index() == 7) {
          book(b, 4, 1, iy++);
        }
        else {
          book(b, "/TMP/ctheta_"+std::to_string(b.index()-1), 20, -1.0, 1.0);
        }
      }
      book(_h_ctheta_large, 4, 1, 4);

      book(_h_cphi, {0.0, 0.3, 0.6, 0.9, 1.2, 1.5});
      book(_h_cphi->bin(1), "/TMP/cphiP_0", 10, 0.0, 1.0);
      book(_h_cphi->bin(2), 5, 1, 1);
      book(_h_cphi->bin(3), 5, 1, 2);
      book(_h_cphi->bin(4), "/TMP/cphiP_3", 10, 0.0, 1.0);
      book(_h_cphi->bin(5), "/TMP/cphiP_4", 10, 0.0, 1.0);

      book(_h_cphi_low, 5, 1, 4);
      book(_h_cphi_mid, "/TMP/cphiP_mid", 10, 0.0, 1.0);
      book(_h_cphi_high, 5, 1, 3);

      book(_h_plus_lam, edges);
      book(_h_minus_lam, edges);
      for (size_t ix=0; ix<_h_plus_lam->numBins(); ++ix) {
        book(_h_plus_lam->bin(ix+1),  "/TMP/lamP_"+std::to_string(ix), 20, -1.0, 1.0);
        book(_h_minus_lam->bin(ix+1), "/TMP/lamM_"+std::to_string(ix), 20, -1.0, 1.0);
      }
      book(_h_plus_lam_large1,  "/TMP/lamP_large_1", 20, -1.0, 1.0);
      book(_h_plus_lam_large2,  "/TMP/lamP_large_2", 20, -1.0, 1.0);
      book(_h_minus_lam_large1, "/TMP/lamM_large_1", 20, -1.0, 1.0);
      book(_h_minus_lam_large2, "/TMP/lamM_large_2", 20, -1.0, 1.0);
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Get beams and average beam momentum
      const ParticlePair& beams = apply<Beam>(event, "Beams").beams();
      const double meanBeamMom = ( beams.first.p3().mod() + beams.second.p3().mod() ) / 2.0;
      // use the positron beam direction as the reference axis for the
      // Lambda/anti-Lambda production asymmetry
      Vector3 beamAxis;
      if (beams.first.pid()==-11) {
        beamAxis = beams.first .momentum().p3().unit();
      }
      else {
        beamAxis = beams.second.momentum().p3().unit();
      }

      MSG_DEBUG("Avg beam momentum = " << meanBeamMom);
      // thrust, to define an axis
      const Thrust& thrust = apply<Thrust>(event, "Thrust");
      const UnstableParticles& ufs = apply<UnstableParticles>(event, "UFS");

      for (const Particle& lambda : ufs.particles(Cuts::abspid==3122)) {
        double xE = lambda.momentum().t()/meanBeamMom;
        int sign = lambda.pid()/3122;
        Vector3 axis1 = lambda.momentum().p3().unit();
        // production asymmetry w.r.t. the positron beam direction
        double cLam = axis1.dot(beamAxis);
        if (sign>0) {
          _h_plus_lam->fill(xE, cLam);
        }
        else {
          _h_minus_lam->fill(xE, cLam);
        }
        if (xE>0.15) {
          if (sign>0) {
            _h_plus_lam_large1 ->fill(cLam);
          }
          else {
            _h_minus_lam_large1->fill(cLam);
          }
        }
        if (xE>0.3) {
          if (sign>0) {
            _h_plus_lam_large2 ->fill(cLam);
          }
          else {
            _h_minus_lam_large2->fill(cLam);
          }
        }
        if (lambda.children().size()!=2) continue;
        // look at the decay products
        Particle proton, pion;
        if (lambda.children()[0].pid()==sign*2212 && lambda.children()[1].pid()==-sign*211) {
          proton = lambda.children()[0];
          pion   = lambda.children()[1];
        }
        else if (lambda.children()[1].pid()==sign*2212 && lambda.children()[0].pid()==-sign*211) {
          proton = lambda.children()[1];
          pion   = lambda.children()[0];
        }
        else {
          continue;
        }
        // boost to the lambda rest frame
        LorentzTransform boost = LorentzTransform::mkFrameTransformFromBeta(lambda.momentum().betaVec());
        FourMomentum pproton = boost.transform(proton.momentum());
        // longitudinal polarization: proton direction w.r.t. the Lambda flight direction
        double ctheta = axis1.dot(pproton.p3().unit());
        _h_ctheta->fill(xE, ctheta);
        if (xE>=0.3) _h_ctheta_large->fill(ctheta);
        // transverse polarization: orient the thrust axis along the Lambda direction
        Vector3 axis2;
        if (lambda.momentum().p3().dot(thrust.thrustAxis())>=0.) {
          axis2 =  thrust.thrustAxis();
        }
        else {
          axis2 = -thrust.thrustAxis();
        }
        // unit normal to the plane spanned by the thrust axis and the Lambda direction
        Vector3 axis3 = axis2.cross(axis1).unit();
        double pT = sqrt(sqr(thrust.thrustMajorAxis().dot(lambda.momentum().p3()))+
                         sqr(thrust.thrustMinorAxis().dot(lambda.momentum().p3())));
        double cPhi = axis3.dot(pproton.p3().unit());
        _h_cphi->fill(pT, cPhi);
        if (pT>0.3) _h_cphi_low ->fill(cPhi);
        if (pT>0.6) _h_cphi_mid ->fill(cPhi);
        if (pT>1.5) _h_cphi_high->fill(cPhi);
      }
    }

    /// Weighted least-squares fit of a normalized angular distribution to
    /// (1 + alpha*x)/2, returning the slope alpha and its uncertainty
    pair<double,double> calcAlpha(Histo1DPtr hist) {
      if (hist->numEntries()==0) return make_pair(0.,0.);
      double sum1(0.), sum2(0.);
      for (const auto& bin : hist->bins()) {
        double Oi = bin.sumW();
        if (Oi==0.) continue;
        // expected content of this bin is ai + bi*alpha
        double ai = 0.5*(bin.xMax()-bin.xMin());
        double bi = 0.5*ai*(bin.xMax()+bin.xMin());
        double Ei = bin.errW();
        sum1 += sqr(bi/Ei);
        sum2 += bi/sqr(Ei)*(Oi-ai);
      }
      return make_pair(sum2/sum1, sqrt(1./sum1));
    }

    /// Weighted least-squares fit of a binned asymmetry; mode==1 assumes an
    /// underlying 1 + cos^2(theta) + (8/3)*A*cos(theta) production distribution
    pair<double,double> calcAsymmetry(Estimate1DPtr hist, unsigned int mode) {
      double sum1(0.), sum2(0.);
      for (const auto& bin : hist->bins()) {
        double Oi = bin.val();
        if (Oi==0.) continue;
        double bi;
        if (mode==0) {
          bi = 0.25*(bin.xMax()-bin.xMin())*(bin.xMax()+bin.xMin());
        }
        else {
          bi = 4.*(bin.xMax()+bin.xMin())/(3.+sqr(bin.xMax())+bin.xMax()*bin.xMin()+sqr(bin.xMin()));
        }
        double Ei = bin.errAvg();
        sum1 += sqr(bi/Ei);
        sum2 += bi/sqr(Ei)*Oi;
      }
      return make_pair(sum2/sum1, sqrt(1./sum1));
    }


    /// Normalise histograms etc., after the run
    void finalize() {
      // longitudinal polarization
      double aLam = 0.642;
      Estimate1DPtr h_long;
      book(h_long, 1, 1, 1);
      for (auto& hist : _h_ctheta->bins()) {
        normalize(hist);
        pair<double,double> alpha = calcAlpha(hist);
        alpha.first  /= aLam;
        alpha.second /= aLam;
        h_long->bin(hist.index()).set(100.*alpha.first, 100.*alpha.second);
      }
      normalize(_h_ctheta_large);
      pair<double,double> alpha = calcAlpha(_h_ctheta_large);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_long_l;
      book(h_long_l, 1, 1, 2);
      h_long_l->bin(1).set(100.*alpha.first, 100.*alpha.second);
      // transverse polarization
      Estimate1DPtr h_trans;
      book(h_trans, 2, 1, 1);
      for (auto& hist : _h_cphi->bins()) {
        normalize(hist);
        pair<double,double> alpha = calcAlpha(hist);
        alpha.first  /= aLam;
        alpha.second /= aLam;
        h_trans->bin(hist.index()).set(100.*alpha.first, 100.*alpha.second);
      }
      normalize(_h_cphi_low);
      alpha = calcAlpha(_h_cphi_low);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_low;
      book(h_trans_low, 2, 1, 2);
      // quote the polarization in percent, consistently with the binned results above
      h_trans_low->bin(1).set(100.*alpha.first, 100.*alpha.second);
      normalize(_h_cphi_mid);
      alpha = calcAlpha(_h_cphi_mid);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_mid;
      book(h_trans_mid, 2, 1, 3);
      h_trans_mid->bin(1).set(100.*alpha.first, 100.*alpha.second);
      normalize(_h_cphi_high);
      alpha = calcAlpha(_h_cphi_high);
      alpha.first  /= aLam;
      alpha.second /= aLam;
      Estimate1DPtr h_trans_high;
      book(h_trans_high, 2, 1, 4);
      h_trans_high->bin(1).set(100.*alpha.first, 100.*alpha.second);
      // asymmetry
      Estimate1DPtr h_asym;
      book(h_asym, 3, 1, 1);
      for (size_t ix=0; ix < _h_plus_lam->numBins(); ++ix) {
        normalize(_h_plus_lam->bin(ix+1));
        normalize(_h_minus_lam->bin(ix+1));
        Estimate1DPtr eTemp;
        book(eTemp, "/TMP/a_lam_" + to_string(ix), 20, -1.0, 1.0);
        asymm(_h_plus_lam->bin(ix+1), _h_minus_lam->bin(ix+1), eTemp);
        pair<double,double> alpha = calcAsymmetry(eTemp, 1);
        h_asym->bin(ix+1).set(-alpha.first, alpha.second);
      }
      normalize(_h_plus_lam_large1);
      normalize(_h_minus_lam_large1);
      Estimate1DPtr eTemp;
      book(eTemp, "/TMP/a_lam_large1", 20, -1.0, 1.0);
      asymm(_h_plus_lam_large1, _h_minus_lam_large1, eTemp);
      alpha = calcAsymmetry(eTemp, 1);
      book(h_asym, 3, 1, 2);
      h_asym->bin(1).set(-alpha.first, alpha.second);
      normalize(_h_plus_lam_large2);
      normalize(_h_minus_lam_large2);
      book(eTemp, "/TMP/a_lam_large2", 20, -1.0, 1.0);
      asymm(_h_plus_lam_large2, _h_minus_lam_large2, eTemp);
      alpha = calcAsymmetry(eTemp, 1);
      book(h_asym, 3, 1, 3);
      h_asym->bin(1).set(-alpha.first, alpha.second);
    }

    /// @}


    /// @name Histograms
    /// @{
    Histo1DGroupPtr _h_ctheta, _h_cphi, _h_plus_lam, _h_minus_lam;
    Histo1DPtr _h_ctheta_large;
    Histo1DPtr _h_cphi_low, _h_cphi_mid, _h_cphi_high;
    Histo1DPtr _h_plus_lam_large1, _h_plus_lam_large2;
    Histo1DPtr _h_minus_lam_large1, _h_minus_lam_large2;
    /// @}

  };


  RIVET_DECLARE_PLUGIN(OPAL_1997_I447188);


}
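A similar note for the production asymmetry (again, $\Theta$ and $A$ are our labels, not part of the original page): the mode==1 weights in calcAsymmetry() correspond to assuming $\Lambda$/$\bar\Lambda$ production spectra of the form

$$ \frac{\mathrm{d}N^{\pm}}{\mathrm{d}\cos\Theta} \;\propto\; 1 + \cos^2\Theta \pm \frac{8}{3}\,A\,\cos\Theta , $$

for which the bin-averaged $(\Lambda-\bar\Lambda)/(\Lambda+\bar\Lambda)$ ratio built by asymm() equals $b_i\,A$ with $b_i = 4\,(x_{\max}+x_{\min})\,/\,(3 + x_{\max}^2 + x_{\max}x_{\min} + x_{\min}^2)$, the coefficient used in the weighted least-squares sums; finalize() then stores the result with a flipped sign in the asymmetry tables.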