Rivet Analyses Reference

TASSO_1989_I266893

Spectra for $\Lambda^0,\bar{\Lambda}^0$ and $\Xi^-,\bar{\Xi}^+$ at 34.8 and 42.1 GeV
Experiment: TASSO (PETRA)
Inspire ID: 266893
Status: VALIDATED
Authors:
  • Peter Richardson
References:
  • Z. Phys. C45 (1989) 209
Beams: e+ e-
Beam energies: (17.4, 17.4); (21.1, 21.1) GeV
Run details:
  • e+ e- to hadrons. Beam energy must be specified as analysis option "ENERGY" when rivet-merging samples.

Measurement of the $\Lambda^0,\bar{\Lambda}^0$ and $\Xi^-,\bar{\Xi}^+$ spectra at 34.8 and 42.1 GeV by the TASSO experiment at PETRA. In addition to the momentum spectra, the $p_L$, $p_\perp^{\text{in}}$ and $p_\perp^{\text{out}}$ spectra and the rapidity of the $\Lambda^0,\bar{\Lambda}^0$ are measured with respect to the sphericity axis; the $\Xi^-,\bar{\Xi}^+$ spectra are available at 34.8 GeV only. Beam energy must be specified as analysis option "ENERGY" when rivet-merging samples.
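
The beam energy option is appended to the analysis name when running Rivet. A minimal sketch (the input file name is illustrative):

    rivet -a TASSO_1989_I266893:ENERGY=34.8 events-34.8GeV.hepmc

The value given for ENERGY must match the centre-of-mass energy of the sample (34.8 or 42.1 GeV) so that the correct set of histograms is booked.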

Source code: TASSO_1989_I266893.cc
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/Sphericity.hh"
#include "Rivet/Projections/UnstableParticles.hh"
#include "Rivet/Projections/ChargedFinalState.hh"

namespace Rivet {


  /// @brief Lambda and Xi- baryon spectra at 34.8 and 42.1 GeV
  class TASSO_1989_I266893 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(TASSO_1989_I266893);

    /// @name Analysis methods
    //@{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(Beam(), "Beams");
      declare(UnstableParticles(), "UFS");
      const ChargedFinalState cfs;
      declare(cfs, "CFS");
      declare(Sphericity(cfs), "Sphericity");
      // Book histograms
      _ih=-1;
      sqs = 1.0;
      if (isCompatibleWithSqrtS(34.8)) {
        _ih = 0;
        sqs = 34.8;
      }
      else if (isCompatibleWithSqrtS(42.1)) {
        _ih = 1;
        sqs = 42.1;
      }
      else {
        MSG_ERROR("Beam energy " << sqrtS() << " not supported!");
      }

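      // Lambda histograms: IDs 3-8 at 34.8 GeV (_ih=0), offset by 6 to IDs 9-14 at 42.1 GeV (_ih=1)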
      book(_h_lam_p    ,6*_ih+3,1,1);
      book(_h_lam_pL   ,6*_ih+4,1,1);
      book(_h_lam_pTIn ,6*_ih+5,1,1);
      book(_h_lam_pTOut,6*_ih+6,1,1);
      book(_h_lam_rap  ,6*_ih+7,1,1);
      book(_h_lam_x    ,6*_ih+8,1,1);
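      // Profiles of the Lambda and charged-particle multiplicities vs sphericity; their ratio is formed in finalize()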
      book(_p_lam_S_1  ,15+_ih,1,1);
      book(_p_lam_S_2  ,15+_ih,1,2);
      if(_ih==0) {
      	book(_h_xi_p    ,18,1,1);
      	book(_h_xi_pL   ,19,1,1);
      	book(_h_xi_pTIn ,20,1,1);
      	book(_h_xi_pTOut,21,1,1);
      	book(_h_xi_rap  ,22,1,1);
      	book(_h_xi_x    ,23,1,1);
      }
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {
      const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
      const size_t numParticles = cfs.particles().size();

      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
      if (numParticles < 2) {
        MSG_DEBUG("Failed leptonic event cut");
        vetoEvent;
      }
      MSG_DEBUG("Passed leptonic event cut");

      // Get beams and average beam momentum
      const ParticlePair& beams = apply<Beam>(event, "Beams").beams();
      const double meanBeamMom = ( beams.first.p3().mod() +
      				   beams.second.p3().mod() ) / 2.0;
      const Sphericity& sphericity = apply<Sphericity>(event, "Sphericity");
      unsigned int nLam(0);
      UnstableParticles ufs = apply<UnstableParticles>(event,"UFS");
      for(const Particle & p : ufs.particles(Cuts::abspid==3122 or Cuts::abspid==3312)) {
      	int id = abs(p.pid());
      	double xE = p.E()/meanBeamMom;
      	Vector3 mom3 = p.p3();
        const double energy = p.E();
      	double modp = mom3.mod();
      	double beta = modp/energy;
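        // Longitudinal and transverse momentum components w.r.t. the sphericity axes, and the rapidity along the sphericity axis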
        const double momS = dot(sphericity.sphericityAxis(), mom3);
        const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
        const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
        const double rapidityS = 0.5 * std::log((energy + momS) / (energy - momS));
      	if(id==3122) {
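          // Weight the scaled-energy spectrum by 1/beta; with the factor of s applied in finalize() this corresponds to (s/beta) dsigma/dx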
      	  _h_lam_x->fill(xE,1./beta);
      	  _h_lam_p->fill(modp/GeV);
      	  _h_lam_pL   ->fill(abs(momS)/GeV  );
      	  _h_lam_pTIn ->fill(abs(pTinS)/GeV );
      	  _h_lam_pTOut->fill(abs(pToutS)/GeV);
      	  _h_lam_rap  ->fill(abs(rapidityS) );
	  ++nLam;
      	}
      	else if(_h_xi_x) {
      	  _h_xi_x->fill(xE,1./beta);
      	  _h_xi_p->fill(modp/GeV);
      	  _h_xi_pL   ->fill(abs(momS)/GeV  );
      	  _h_xi_pTIn ->fill(abs(pTinS)/GeV );
      	  _h_xi_pTOut->fill(abs(pToutS)/GeV);
      	  _h_xi_rap  ->fill(abs(rapidityS) );
      	}
      }
      double sphere = sphericity.sphericity();
      _p_lam_S_1->fill(sphere,nLam);
      _p_lam_S_2->fill(sphere,cfs.particles().size());
    }


    /// Normalise histograms etc., after the run
    void finalize() {
      scale( _h_lam_p    , crossSection()/nanobarn/sumOfWeights());
      scale( _h_lam_pL   , crossSection()/nanobarn/sumOfWeights());
      scale( _h_lam_pTIn , crossSection()/nanobarn/sumOfWeights());
      scale( _h_lam_pTOut, crossSection()/nanobarn/sumOfWeights());
      scale( _h_lam_rap  , crossSection()/nanobarn/sumOfWeights());
      scale( _h_lam_x    , sqr(sqs)*crossSection()/nanobarn/sumOfWeights());
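      // Ratio of the two profiles: mean Lambda multiplicity per charged particle as a function of sphericity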
      Scatter2DPtr temp;
      book(temp,15+_ih,1,3);
      divide(_p_lam_S_1,_p_lam_S_2,temp);
      if(_ih==0) {
      	scale( _h_xi_p    , crossSection()/nanobarn/sumOfWeights());
      	scale( _h_xi_pL   , crossSection()/nanobarn/sumOfWeights());
      	scale( _h_xi_pTIn , crossSection()/nanobarn/sumOfWeights());
      	scale( _h_xi_pTOut, crossSection()/nanobarn/sumOfWeights());
      	scale( _h_xi_rap  , crossSection()/nanobarn/sumOfWeights());
      	scale( _h_xi_x    , sqr(sqs)*crossSection()/nanobarn/sumOfWeights());
      }
    }

    //@}


    /// @name Histograms
    //@{
    Histo1DPtr _h_lam_p, _h_lam_pL, _h_lam_pTIn, _h_lam_pTOut, _h_lam_rap, _h_lam_x;
    Profile1DPtr _p_lam_S_1, _p_lam_S_2;
    Histo1DPtr _h_xi_p, _h_xi_pL, _h_xi_pTIn, _h_xi_pTOut, _h_xi_rap, _h_xi_x;
    int _ih;
    double sqs;
    //@}


  };


  // The hook for the plugin system
  RIVET_DECLARE_PLUGIN(TASSO_1989_I266893);


}