Rivet analyses reference

H1_1996_I421105

Inclusive D0 and D*+- production in deep inelastic e p scattering at HERA

Experiment: H1 (HERA)
Inspire ID: 421105
Status: VALIDATED
Authors:
Beam energies: (27.6, 820.0); (820.0, 27.6) GeV
Run details:
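A minimal usage sketch, assuming deep inelastic ep events at the beam energies above have already been generated into a HepMC file (the file name below is a placeholder): the analysis can be run with the standard rivet command-line tool:

  rivet --analysis=H1_1996_I421105 events.hepmc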
First results on inclusive D0 and D* production in deep inelastic ep scattering are reported using data collected by the H1 experiment at HERA in 1994.

Source code: H1_1996_I421105.cc

// -*- C++ -*-
2#include "Rivet/Analysis.hh"
3#include "Rivet/Projections/FinalState.hh"
4#include "Rivet/Projections/FastJets.hh"
5#include "Rivet/Projections/DISKinematics.hh"
6#include "Rivet/Projections/UnstableParticles.hh"
7namespace Rivet {
8
9
  /// @brief Inclusive D0 and D*+- production in deep inelastic e p scattering at HERA (H1)
  class H1_1996_I421105 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(H1_1996_I421105);


    /// @name Analysis methods
    ///@{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(DISKinematics(), "Kinematics");
      declare(UnstableParticles(), "UFS");

      // The basic final-state projection:
      // all final-state particles within the given eta acceptance
      const FinalState fs(Cuts::abseta < 4.9);
      declare(fs, "FS");

      // Book histograms, taking the binning from the reference data
      // via the HEPData ID (digits in "d01-x01-y01" etc.)
      book(_h["p_tD*_norm"], 4, 1, 1);
      book(_h["p_tD*"], 4, 1, 2);
      book(_h["p_tD0_norm"], 5, 1, 1);
      book(_h["p_tD0"], 5, 1, 2);
      book(_h["xD_D*_norm"], 6, 1, 1);
      book(_h["xD_D*"], 6, 1, 2);
      book(_h["xD_D0_norm"], 7, 1, 1);
      book(_h["xD_D0"], 7, 1, 2);

    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Get the DIS kinematics and the boost to the hadronic centre-of-mass (HCM) frame
      const DISKinematics& dk = apply<DISKinematics>(event, "Kinematics");
      const LorentzTransform hcmboost = dk.boostHCM();

      const double y  = dk.y();
      const double W2 = dk.W2()/GeV2;
      const double Q2 = dk.Q2()/GeV2;

      // DIS phase-space selection: 10 < Q2 < 100 GeV2 and 0.01 < y < 0.7
      const bool cut = Q2 > 10 && Q2 < 100 && y > 0.01 && y < 0.7;
      if (!cut) vetoEvent;

      for (const Particle& p : apply<UnstableParticles>(event, "UFS").particles()) {
        // Pseudorapidity acceptance applied to the x_D spectra
        const bool etacut = p.abseta() < 1.5;

        // Scaled momentum x_D = 2 |p_D| / W, with the D-meson momentum
        // evaluated in the HCM frame
        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
        const double p_D = hcmMom.p()/GeV;
        const double x_D = 2.*p_D/sqrt(W2);

        // D0 spectra
        if (p.abspid() == 421) {
          _h["p_tD0"]->fill(p.pT()/GeV);
          _h["p_tD0_norm"]->fill(p.pT()/GeV);
          if (etacut) _h["xD_D0"]->fill(x_D);
          if (etacut) _h["xD_D0_norm"]->fill(x_D);
        }

        // D*+- spectra
        if (p.abspid() == 413) {
          _h["p_tD*"]->fill(p.pT()/GeV);
          _h["p_tD*_norm"]->fill(p.pT()/GeV);
          if (etacut) _h["xD_D*"]->fill(x_D);
          if (etacut) _h["xD_D*_norm"]->fill(x_D);
        }
      }

    }

    /// Normalise histograms etc., after the run
    void finalize() {

      // Scale the differential cross-sections to nb; the shape histograms are normalised to unity
      scale(_h["p_tD*"], crossSection()/nanobarn/sumW());
      scale(_h["p_tD0"], crossSection()/nanobarn/sumW());
      normalize(_h["p_tD*_norm"]);
      normalize(_h["p_tD0_norm"]);

      scale(_h["xD_D*"], crossSection()/nanobarn/sumW());
      scale(_h["xD_D0"], crossSection()/nanobarn/sumW());
      normalize(_h["xD_D*_norm"]);
      normalize(_h["xD_D0_norm"]);

    }

    ///@}


    /// @name Histograms
    ///@{
    map<string, Histo1DPtr> _h;
    ///@}

  };


  RIVET_DECLARE_PLUGIN(H1_1996_I421105);

}
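Once the run has produced a YODA histogram file (Rivet.yoda by default), the output can be plotted against the reference data with the standard rivet-mkhtml tool, e.g.

  rivet-mkhtml Rivet.yoda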