Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

ATLAS_2023_I2648096

dileptonic ttbar at 13 TeV
Experiment: ATLAS (LHC)
Inspire ID: 2648096
Status: VALIDATED
Authors:
  • Christian Gutschow
References:
Beams: p+ p+
Beam energies: (6500.0, 6500.0) GeV
Run details:
  • dileptonic top-quark pair production

Differential and double-differential distributions of kinematic variables of leptons from decays of top-quark pairs ($t\bar{t}$) are measured using the full LHC Run 2 data sample collected with the ATLAS detector. The data were collected at a pp collision energy of $\sqrt{s}=13$ TeV and correspond to an integrated luminosity of 140 fb$^{-1}$. The measurements use events containing an oppositely charged $e\mu$ pair and $b$-tagged jets. The results are compared with predictions from several Monte Carlo generators. While no prediction is found to be consistent with all distributions, a better agreement with measurements of the lepton $p_\text{T}$ distributions is obtained by reweighting the $t\bar{t}$ sample so as to reproduce the top-quark $p_\text{T}$ distribution from an NNLO calculation. The inclusive top-quark pair production cross-section is measured as well, both in a fiducial region and in the full phase-space. The total inclusive cross-section is found to be $\sigma_{t\bar{t}} = 829 \pm 1\,\text{(stat)} \pm 13\,\text{(syst)} \pm 8\,\text{(lumi)} \pm 2\,\text{(beam)}$ pb, where the uncertainties are due to statistics, systematic effects, the integrated luminosity and the beam energy. This is in excellent agreement with the theoretical expectation.

Source code: ATLAS_2023_I2648096.cc
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
#include "Rivet/Projections/PromptFinalState.hh"
#include "Rivet/Projections/InvisibleFinalState.hh"
#include "Rivet/Projections/DressedLeptons.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/Tools/BinnedHistogram.hh"

namespace Rivet {

  /// @brief lepton differential ttbar analysis at 13 TeV
  /// @brief Lepton differential ttbar analysis at 13 TeV (oppositely-charged e-mu channel)
  ///
  /// Measures single- and double-differential lepton kinematic distributions
  /// from dileptonic ttbar events, filling both absolute and normalised
  /// variants of every histogram.
  class ATLAS_2023_I2648096 : public Analysis {
  public:

    RIVET_DEFAULT_ANALYSIS_CTOR(ATLAS_2023_I2648096);

    /// Book projections and histograms
    void init() {

      // Loose full-acceptance cut used for the lepton collections vetoed from jet input
      Cut eta_full = Cuts::abseta < 5.0 && Cuts::pT > 1.0*MeV;

      // Photons used to dress the leptons
      FinalState photons(Cuts::abspid == PID::PHOTON);

      // Prompt electrons, dressed with photons within dR < 0.1
      PromptFinalState prompt_el(Cuts::abspid == PID::ELECTRON, true);
      DressedLeptons elecs(photons, prompt_el, 0.1, (Cuts::abseta < 2.5 && Cuts::pT > 25*GeV));
      DressedLeptons veto_elecs(photons, prompt_el, 0.1, eta_full, false);
      declare(elecs, "elecs");

      // Prompt muons, dressed with photons within dR < 0.1
      PromptFinalState prompt_mu(Cuts::abspid == PID::MUON, true);
      DressedLeptons muons(photons, prompt_mu, 0.1, (Cuts::abseta < 2.5 && Cuts::pT > 25*GeV));
      DressedLeptons veto_muons(photons, prompt_mu, 0.1, eta_full, false);
      declare(muons, "muons");

      // Jets are clustered from everything except the dressed leptons and invisibles
      const InvisibleFinalState invis(true, true);
      VetoedFinalState vfs;
      vfs.addVetoOnThisFinalState(veto_elecs);
      vfs.addVetoOnThisFinalState(veto_muons);
      vfs.addVetoOnThisFinalState(invis);
      FastJets jets(vfs, FastJets::ANTIKT, 0.4, JetAlg::Muons::ALL, JetAlg::Invisibles::ALL);
      declare(jets, "jets");

      // Book 1D histograms (absolute + normalised pairs; id+36 is the normalised table)
      bookHistos("lep_pt",       6);
      bookHistos("lep_eta",      9);
      bookHistos("dilep_sumE",  12);
      bookHistos("dilep_mass",  15);
      bookHistos("dilep_sumpt", 18);
      bookHistos("dilep_pt",    21);
      bookHistos("dilep_dphi",  24);
      bookHistos("dilep_rap",   27);

      // Unrolled 2D distributions - 2nd-dim bin edges must be specified
      const vector<double> b_mass_2D = { 0., 70., 100., 130., 200., 800. };
      const vector<double> b_pt_2D = { 0., 40., 65., 100. };
      const vector<double> b_sumE_2D = { 0., 110., 140., 200., 250., 900. };

      bookHisto2D("dilep_rap_mass",  78, b_mass_2D);
      bookHisto2D("dilep_dphi_mass", 79, b_mass_2D);
      bookHisto2D("dilep_dphi_pt",   80, b_pt_2D);
      bookHisto2D("dilep_dphi_sumE", 81, b_sumE_2D);
    }

    /// Per-event selection and histogram filling
    void analyze(const Event& event) {
      vector<DressedLepton> elecs = apply<DressedLeptons>(event, "elecs").dressedLeptons();
      vector<DressedLepton> muons = apply<DressedLeptons>(event, "muons").dressedLeptons();
      Jets jets = apply<FastJets>(event, "jets").jetsByPt(Cuts::pT > 25*GeV && Cuts::abseta < 2.5);

      // Remove leptons overlapping with a selected jet within dR < 0.4
      for (const Jet& jet : jets) {
        ifilter_discard(elecs, deltaRLess(jet, 0.4));
        ifilter_discard(muons, deltaRLess(jet, 0.4));
      }
      // Require an oppositely-charged e-mu pair
      if (elecs.empty() || muons.empty())  vetoEvent;
      if (elecs[0].charge() == muons[0].charge())  vetoEvent;

      FourMomentum el = elecs[0].momentum();
      FourMomentum mu = muons[0].momentum();
      FourMomentum ll = elecs[0].momentum() + muons[0].momentum();

      // Leading lepton must pass the trigger-motivated 27 GeV threshold
      if (max(el.pT(), mu.pT()) < 27*GeV)  vetoEvent;

      const double dphi = deltaPhi(el,mu);
      const double drap = ll.absrap();
      const double pt   = ll.pT()/GeV;
      // Clamp to just below the last bin edge: last bins are inclusive (overflow folded in).
      // NB: the clamp values must be double literals so min() deduces a single type.
      const double sumE = min((el.E()+mu.E())/GeV, 899.);
      const double mass = min(ll.mass()/GeV, 799.);

      // Fill histograms
      // include explicit overflow protection as last bins are inclusive
      fillHistos("lep_pt",      min(el.pT()/GeV, 349.));
      fillHistos("lep_pt",      min(mu.pT()/GeV, 349.));
      fillHistos("lep_eta",     el.abseta());
      fillHistos("lep_eta",     mu.abseta());
      fillHistos("dilep_pt",    min(pt, 299.));
      fillHistos("dilep_mass",  mass);
      fillHistos("dilep_rap",   drap);
      fillHistos("dilep_dphi",  dphi);
      fillHistos("dilep_sumpt", min((el.pT()+mu.pT())/GeV, 599.));
      fillHistos("dilep_sumE",  sumE);

      // Fill unrolled 2D histograms vs mass
      fillHisto2D("dilep_rap_mass",  mass, drap);
      fillHisto2D("dilep_dphi_mass", mass, dphi);
      fillHisto2D("dilep_dphi_pt",   min(pt, 99.), dphi);
      fillHisto2D("dilep_dphi_sumE", sumE, dphi);
    }

    /// Scale histograms to cross-section (absolute) or unit area (normalised)
    void finalize() {
      // Normalize to cross-section
      const double sf = crossSection()/femtobarn/sumOfWeights();

      // finalisation of 1D histograms
      for (auto& hist : _h) {
        if (hist.first.find("norm") != string::npos) {
          // histogram normalisation; guard against an empty histogram (zero integral)
          const double integ = hist.second->integral();
          if (integ != 0.)  scale(hist.second, 1.0/integ);
        }
        else  scale(hist.second, sf);
      }

      // finalisation of 2D histograms
      for (auto& hist : _h_multi) {
        if (hist.first.find("_norm") != std::string::npos) {
          // scaling for normalised distribution according to integral of whole set;
          // guard against a zero integral from an empty run
          const double norm2D = integral2D(hist.second);
          if (norm2D != 0.)  hist.second.scale(1./norm2D, this);
        }
        else {
          // scaling for non-normalised distribution
          hist.second.scale(sf, this);
        }
      }
    }


  private:

    /// @name Histogram helper functions
    //@{

    /// Book the absolute (id) and normalised (id+36) variants of a 1D histogram
    void bookHistos(const string& name, const unsigned int id) {
      book(_h[name], id, 1, 1);
      book(_h["norm_" + name], id+36, 1, 1);
    }

    /// Fill both the absolute and normalised variants of a 1D histogram
    void fillHistos(const string& name, const double value) {
      _h[name]->fill(value);
      _h["norm_" + name]->fill(value);
    }

    /// Book the absolute (id) and normalised (id+4) variants of an unrolled 2D
    /// histogram: one 1D histogram per bin of the second-dimension axis @a axis1
    void bookHisto2D(const string& name, const unsigned int id, const vector<double>& axis1) {
      size_t nBins = axis1.size()-1;
      for (size_t i=0; i < nBins; ++i) {
        Histo1DPtr tmp;
        _h_multi[name].add(axis1[i], axis1[i+1], book(tmp, id, 1, i+1));
        _h_multi[name + "_norm"].add(axis1[i], axis1[i+1], book(tmp, id+4, 1, i+1));
      }
    }

    /// Fill both variants of an unrolled 2D histogram:
    /// @a val1 selects the outer bin, @a val2 is the filled quantity
    void fillHisto2D(const string& name, const double val1, const double val2) {
      _h_multi[name].fill(val1, val2);
      _h_multi[name+"_norm"].fill(val1, val2);
    }

    /// Sum the integrals of all component histograms of an unrolled 2D set
    /// (excluding under/overflow), used to normalise the whole set together
    double integral2D(BinnedHistogram& h_multi) {
      double total_integral = 0;
      for (Histo1DPtr& h : h_multi.histos()) {
        total_integral += h->integral(false);
      }
      return total_integral;
    }


    // pointers to 1D and 2D histograms
    map<string, Histo1DPtr> _h;
    map<string, BinnedHistogram> _h_multi;
    //@}

  };

  // Declare the class as a hook for the plugin system
  RIVET_DECLARE_PLUGIN(ATLAS_2023_I2648096);
}