#define TopTree_cxx
#include "TopTree.h"
#include "TH2.h"
#include "TProfile.h"
#include "TStyle.h"
#include "TCanvas.h"
#include "TFile.h"
#include "TLorentzVector.h"
#include <iostream>
#include <cmath>
#include <cstdlib>
//pdf
#include "Electroweak/cteq6/cteq6/TCteq6.hh"
#include "Electroweak/pythia/pythia/TG3Pythia6.hh"

// global variables - the easy way to program
// hardcoding the size of the array is kludgy, but it works
// first index is the number of pdf sets; 2nd index is the max number of events
Float_t Eventpdfwt[46][50000] = {0.0};
Long_t  myentries = 0;

using std::cin;
using std::cout;
using std::endl;
#include <iomanip>
using std::setw;

void TopTree::Loop()
{
   if (fChain == 0) return;

   Int_t nentries = Int_t(fChain->GetEntries());
   Int_t nbytes = 0, nb = 0;
   myentries = nentries;

   //pdf some plots related to pdf weights
   TH1F* h_pdfratio      = new TH1F("h_pdfratio",      " pdf wt ratio ", 150, 0., 15.);
   TH1F* h_cutpdfratio   = new TH1F("h_cutpdfratio",   " pdf wt ratio ", 150, 0., 15.);
   TH1F* h_pdfratio30    = new TH1F("h_pdfratio30",    " pdf wt ratio ", 150, 0., 15.);
   TH1F* h_cutpdfratio30 = new TH1F("h_cutpdfratio30", " pdf wt ratio ", 150, 0., 15.);
   TProfile* h_pdfwts        = new TProfile("h_pdfwts",        " Pdfwts ",            46, 0., 46., 0., 5.);
   TProfile* h_cutpdfwts     = new TProfile("h_cutpdfwts",     " Cut Events Pdfwts ", 46, 0., 46., 0., 5.);
   TProfile* h_cteqpdfwts    = new TProfile("h_cteqpdfwts",    " Pdfwts ",            41, 0., 41., 0., 5.);
   TProfile* h_cutcteqpdfwts = new TProfile("h_cutcteqpdfwts", " Cut Events Pdfwts ", 41, 0., 41., 0., 5.);

   //pdf -
   Int_t   MAXPDF = 46;
   Float_t Eventpdfratio[46]    = {0.0};
   Float_t CutEventpdfratio[46] = {0.0};
   Int_t   ncutpdfevents = 0;

   //myentries = 100; // for testing only look at 100 events

   //pdf- call function to get Eventpdfwt filled, one pdf set at a time
   for(Int_t pdfnum = 0; pdfnum < MAXPDF; pdfnum++)
   {
      get_pdf(pdfnum);
   }

   // loop over the events
   for (Int_t jentry = 0; jentry < nentries; jentry++)
   {
      Int_t ientry = LoadTree(jentry);
      nb = fChain->GetEntry(jentry);
      nbytes += nb;
      // if (Cut(ientry) < 0) continue;

      //pdf
      // get ratio of weights for all events before any cuts
      // store this in a profile histogram
      for(Int_t pdfnum = 0; pdfnum < MAXPDF; pdfnum++)
      {
         Float_t denom = Eventpdfwt[0][jentry];
         if(pdfnum > 5)  denom = Eventpdfwt[5][jentry]; // compare cteq6m eigenvectors to cteq6m default
         if(pdfnum == 2) denom = Eventpdfwt[1][jentry]; // compare 2 MRST pdf's with different alpha_s values (MRST 72, MRST 75)
         if(pdfnum == 4) denom = Eventpdfwt[3][jentry]; // compare CTEQ6L1 to CTEQ6L; these have different alpha_s values
         Eventpdfratio[pdfnum] += Eventpdfwt[pdfnum][jentry]/denom;
         h_pdfwts->Fill(pdfnum, Eventpdfwt[pdfnum][jentry]/denom);
         if(pdfnum > 4) h_cteqpdfwts->Fill(pdfnum-5, Eventpdfwt[pdfnum][jentry]/Eventpdfwt[5][jentry]);
         h_pdfratio->Fill(Eventpdfwt[1][jentry]/Eventpdfwt[0][jentry]);
         h_pdfratio30->Fill(Eventpdfwt[36][jentry]/Eventpdfwt[0][jentry]);
      }

      // make event selection cuts here
      if(summary_fnTightLepton[0] == 1
         && summary_fnLooseLepton[0] == 0       // dilepton veto
         && summary_fnTightJet[0] > 2
         && !(summary_fTopEventClass[0]&0x8)    // Z veto
         && summary_fmuoMet[0] > 20.)
      { // get pdf weights after cuts
         for(Int_t pdfnum = 0; pdfnum < MAXPDF; pdfnum++)
         {
            Float_t denom = Eventpdfwt[0][jentry];
            if(pdfnum > 5)  denom = Eventpdfwt[5][jentry];
            if(pdfnum == 2) denom = Eventpdfwt[1][jentry];
            if(pdfnum == 4) denom = Eventpdfwt[3][jentry];
            CutEventpdfratio[pdfnum] += Eventpdfwt[pdfnum][jentry]/denom;
            h_cutpdfwts->Fill(pdfnum, Eventpdfwt[pdfnum][jentry]/denom);
            if(pdfnum > 4) h_cutcteqpdfwts->Fill(pdfnum-5, Eventpdfwt[pdfnum][jentry]/Eventpdfwt[5][jentry]);
            h_cutpdfratio->Fill(Eventpdfwt[1][jentry]/Eventpdfwt[0][jentry]);
            h_cutpdfratio30->Fill(Eventpdfwt[36][jentry]/Eventpdfwt[0][jentry]);
         }
         ncutpdfevents++;
      } // end of cuts on event
   } // end of loop over events

   // printout summary information and save the histograms in a file
   for(Int_t pdfnum = 0; pdfnum < MAXPDF; pdfnum++)
   {
      cout << " pdf " << setw(2) << pdfnum
           << "  average wt ratio: all events " << Eventpdfratio[pdfnum]/nentries
           << ", after cuts " << CutEventpdfratio[pdfnum]/ncutpdfevents << endl;
   }
   cout << " Mean of distribution is " << h_pdfratio->GetMean() << " +- "
        << h_pdfratio->GetRMS()/sqrt(h_pdfratio->GetEntries()) << endl;
   cout << " Mean of distribution is " << h_cutpdfratio->GetMean() << " +- "
        << h_cutpdfratio->GetRMS()/sqrt(h_cutpdfratio->GetEntries()) << endl;

   TFile *fn = new TFile("Pdferr_hists.root", "RECREATE");
   // TFile *fn = new TFile("hists_ttbar_iso.root", "NEW");
   // TFile *fn = new TFile("savedhists2.root", "UPDATE");
   //fn->Write(); //to write all the plots
   h_pdfwts->Write();
   h_cutpdfwts->Write();
   h_cteqpdfwts->Write();
   h_cutcteqpdfwts->Write();
   h_pdfratio->Write();
   h_cutpdfratio->Write();
   h_pdfratio30->Write();
   h_cutpdfratio30->Write();

   h_cutcteqpdfwts->Divide(h_cteqpdfwts);

   //pdf - get the errors for the pdf acceptance
   // note: this code can also be run as a separate root macro on the histograms made above
   // adapted from Pasha's(?) code at http://www-cdf.fnal.gov/internal/physics/ewk/WZCS/wpdf4.cpp
   // to get the acceptance uncertainty
   // This is the electroweak prescription for summing the 40 cteq eigenvectors
   double a1[20];
   double a2[20];
   double a0 = 1.0;
   for (Int_t i = 0; i < 20; i++) {
      a1[i] = h_cutcteqpdfwts->GetBinContent(2*i+2); // first member of eigenvector pair i+1
      a2[i] = h_cutcteqpdfwts->GetBinContent(2*i+3); // second member of eigenvector pair i+1
      cout << " bin " << i << " a1 " << a1[i] << " a2 " << a2[i] << endl;
   }

   // For each of the 20 eigenvector pairs, add the deviations from the central
   // value a0 in quadrature: same-sign deviations are split evenly between the
   // positive and negative sums, opposite-sign deviations each go to their own sum.
   double temp1 = 0.;
   double temp2 = 0.;
   double sumpls = 0.;
   double summin = 0.;
   int i = 0;
   while (i < 20) {
      temp1 = a1[i] - a0;
      temp2 = a2[i] - a0;
      if ((temp1 <= 0.) && (temp2 <= 0.)) {
         summin = summin + (0.5 * ((temp1 * temp1) + (temp2 * temp2)));
      }
      else if ((temp1 > 0.) && (temp2 > 0.)) {
         sumpls = sumpls + (0.5 * ((temp1 * temp1) + (temp2 * temp2)));
      }
      else if ((temp1 > 0.) && (temp2 <= 0.)) {
         sumpls = sumpls + (temp1 * temp1);
         summin = summin + (temp2 * temp2);
      }
      else if ((temp1 <= 0.) && (temp2 > 0.)) {
         sumpls = sumpls + (temp2 * temp2);
         summin = summin + (temp1 * temp1);
      }
      else {
         cout << "Error :: No Valid Category !!" << "\n";
      }
" << "\n"; } i++; } double mindeltaA = sqrt(summin); double plsdeltaA = sqrt(sumpls); // this is the error using the cteq eigenvectors cout << "Positive PDF Error (fraction) = " << plsdeltaA << "\n"; cout << "Negative PDF Error (fractio) = " << mindeltaA << "\n"; // compare mrst72 and cteq5l h_cutpdfwts->Divide(h_pdfwts); // ratio of weights after/before cuts cout << " Ratio of MRST72 to CTEQ5L PDF's: cross section " << h_pdfwts->GetBinContent(2) << " acceptance: " << h_cutpdfwts->GetBinContent(2) << endl; cout << " Ratio of MRST75 to MRST72 PDF's: cross section " << h_pdfwts->GetBinContent(3) << " acceptance: " << h_cutpdfwts->GetBinContent(3) << endl; cout << " Ratio of CTEQ6L1 to CTEQ6L PDF's: cross section " << h_pdfwts->GetBinContent(5) << " acceptance: " << h_cutpdfwts->GetBinContent(5) << endl; cout << " Ratio of CTEQ6M (NLO) to CTEQ5L (LO) PDF's: cross section " << h_pdfwts->GetBinContent(6) << " acceptance: " << h_cutpdfwts->GetBinContent(6) << endl; // end of code to printout pdf uncertainty estimate } /**************** // get_pdf gets the pdf weights and fills the global array Eventpdfwt[index][jentry] index is the index to the array, but also matches a specific pdf set jentry is the entry for that event in the ntuple the weight is the pdf value of the 2 incoming partons multiplied together. To get the pdf weight requires the x of the parton and the Q of the event One issue is whether it is the x value of the parton before or after ISR. For Pythia, hepg particles 2 and 3 are before ISR and 4 and 5 are after ISR The Q value for the event will depend on how the event was generated. This is currently written to match the Pythia ttopei ttbar sample. The approach here is to loop over all of the events and fill Eventpdfwt[index][jentry] Because the pdf sets require reading in tables from files, we do 1 set at a time. For 46 different pdfs, that means looping through the ntuple at least 46 different times Using LHAPDF should allow us to hold all the pdf sets in memory at one time which means we could loop through the ntuple just once. 
   Index   PDF set
   -----   ---------
     0     CTEQ5L
     1     MRST72
     2     MRST75
     3     CTEQ6L
     4     CTEQ6L1
     5     CTEQ6M
    5+i    CTEQ6M eigenvector i
****************/

void TopTree::get_pdf(Int_t index)
{
   //Get the pdf weight for the event
   Int_t ISet = 3; // LO cteq6L
   cout << " Getting PDF weights for index " << index << endl;

   // For using PDFlib cteq5l and MRST pdf's
   double uval, dval, usea, dsea, str, chm, bot, top, glu;
   Int_t ngroup, nset;
   TG3Pythia6* py = TG3Pythia6::Instance();
   if(index == 0)
   {
      ISet = 4046; // for cteq5l, see Electroweak/scripts/test_pdflib.C
      ngroup = ISet/1000;
      nset   = ISet%1000;
      cout << " ISet " << ISet << " ngroup " << ngroup << " nset " << nset;
      printf(" ngroup, nset: %3i %3i\n", ngroup, nset);
      py->Pdfset("NPTYPE", 1);
      py->Pdfset("NSET",   nset);
      py->Pdfset("NGROUP", ngroup); // need to call this 2nd for some reason
   }
   if(index == 1)
   {
      ISet = 3072; // for mrst72
      ngroup = ISet/1000;
      nset   = ISet%1000;
      cout << " ISet " << ISet << " ngroup " << ngroup << " nset " << nset;
      printf(" ngroup, nset: %3i %3i\n", ngroup, nset);
      py->Pdfset("NPTYPE", 1);
      py->Pdfset("NSET",   nset);
      py->Pdfset("NGROUP", ngroup); // need to call this 2nd for some reason
   }
   if(index == 2)
   {
      ISet = 3075; // for mrst75
      ngroup = ISet/1000;
      nset   = ISet%1000;
      cout << " ISet " << ISet << " ngroup " << ngroup << " nset " << nset;
      printf(" ngroup, nset: %3i %3i\n", ngroup, nset);
      py->Pdfset("NPTYPE", 1);
      py->Pdfset("NSET",   nset);
      py->Pdfset("NGROUP", ngroup); // need to call this 2nd for some reason
   }

   // CTEQ info
   TCteq6* cteq6 = TCteq6::Instance();
   if(index > 2)
   {
      ISet = index;                    // for cteq6L, cteq6L1
      if(index > 4) ISet = index + 95; // index==5 corresponds to ISET 100 = default cteq6m
      cteq6->Setctq6(ISet);
   }

   // IMPORTANT: Choosing which particles in the list of hepg particles are
   // the incoming partons is important and is dependent on the MC.
   // The default I use here is for Pythia.
   // first loop over events just to get the default pdf wt
   //Int_t part1 = 2; // partons before ISR
   //Int_t part2 = 3;
   Int_t part1 = 4;   // valid for pythia - partons after ISR
   Int_t part2 = 5;
   /*
   // for Alpgen samples with Herwig showering
   Int_t part1 = 3;
   Int_t part2 = 4;
   */

   Int_t nbytes = 0, nb = 0; // do I need this??
   for (Int_t jentry = 0; jentry != myentries; ++jentry)
   {
      Int_t ientry = LoadTree(jentry); // in case of a TChain, ientry is
                                       // the entry number in the current file
      nb = fChain->GetEntry(jentry);   // do I need this??
      nbytes += nb;
      // if (Cut(ientry) < 0) continue;

      // IMPORTANT: The other piece of this is the Q^2 for the event. This also
      // depends on the MC used and the specific process. Again, the example
      // below is for Pythia ttbar production.

      // virtuality of incoming partons (I think this is correct)
      // I'm doing this before ISR to start with
      Float_t P1 = pow(hepg_E[part1],2)-pow(hepg_Px[part1],2)-pow(hepg_Py[part1],2)-pow(hepg_Pz[part1],2);
      Float_t P2 = pow(hepg_E[part2],2)-pow(hepg_Px[part2],2)-pow(hepg_Py[part2],2)-pow(hepg_Pz[part2],2);
      Float_t P12pt = pow(hepg_Px[part1]+hepg_Px[part2],2) + pow(hepg_Py[part1]+hepg_Py[part2],2);

      //Q^2 scale is Q2 = [ Pt^2 + (P1^2 + P2^2 + 2*mtop^2)/2 ] * PARP(67) (= 4)
      //See pythia manual 6.2, page 189
      //MSTP(32) = 8.
      TLorentzVector p1(hepg_Px[part1],hepg_Py[part1],hepg_Pz[part1],hepg_E[part1]);
      TLorentzVector p2(hepg_Px[part2],hepg_Py[part2],hepg_Pz[part2],hepg_E[part2]);
      // The two outgoing top quarks are partons 6 and 7 in pythia
      TLorentzVector p3(hepg_Px[6],hepg_Py[6],hepg_Pz[6],hepg_E[6]);
      TLorentzVector p4(hepg_Px[7],hepg_Py[7],hepg_Pz[7],hepg_E[7]);

      // now get pthat - the pt of the outgoing top quark in the cm system
      // see page 94 of the Pythia 6.2 manual for the math
      TLorentzVector shatV = p1+p2;
      Double_t shat = shatV.M2(); // shat = shat*shat;
      TLorentzVector thatV = p1-p3;
      Double_t that = thatV.M2(); // that = that*that;
      Double_t costhetahat = 2.*(that+shat/2.)/shat;
      Double_t mt1 = p3.M2();
      Double_t mt2 = p4.M2();
      Double_t b34 = sqrt(pow(1.-mt1/shat-mt2/shat,2)-4.*mt1*mt2/(shat*shat));
      if(!(b34 > 0. && b34 < 10.)) // protect against occasional nan
      {
         cout << "shat " << shat << " that " << that << " b34 " << b34
              << " costheta " << costhetahat << endl;
         b34 = 0.5;
      }
      Double_t pthat = sqrt(shat)*b34*sqrt(1.-costhetahat*costhetahat)/2.;

      Double_t Qscale = (pthat*pthat+(P1+P2+2.*175.*175.)/2.);
      if(!(Qscale > 0. && Qscale < 4000000.)) // protect against weird values
      {
         Qscale = 175.*175.;
         cout << " pthat " << pthat << " Q2 " << Qscale << endl;
      }

      // Now we get the 2 weights for the 2 partons - which depends on
      // the type of the incoming parton, their x value and the Q^2
      // value of the process
      Double_t tot_pdfwt = 1.0; // for the 2 pdf's multiplied together.
      Double_t pdfval = 0.0;
      // NOTE: the loop bound, the parton selection and the definition of the
      // momentum fraction partx below are a reconstruction; partx is taken as
      // |pz| of the parton divided by the 980 GeV Tevatron beam energy.
      Int_t npdfpartons = 0;
      for(Int_t np = 0; np <= part2; np++)
      {
         if(np == part1 || np == part2) // the 2 incoming partons
         {
            Double_t partx = fabs(hepg_Pz[np])/980.;
            Int_t ptype = 0;
            npdfpartons++;
            if(abs(hepg_ID[np])==2 && ((hepg_ID[np]*hepg_IDparent[np]) > 0)){ptype = 1;}
            if(abs(hepg_ID[np])==2 && ((hepg_ID[np]*hepg_IDparent[np]) < 0)){ptype = -1;}
            if(abs(hepg_ID[np])==1 && ((hepg_ID[np]*hepg_IDparent[np]) > 0)){ptype = 2;}
            if(abs(hepg_ID[np])==1 && ((hepg_ID[np]*hepg_IDparent[np]) < 0)){ptype = -2;}
            // for pdflib u=2 and d=1, but I correct for this below
            if(abs(hepg_ID[np])==3 && ((hepg_ID[np]*hepg_IDparent[np]) > 0)){ptype = 3;}
            if(abs(hepg_ID[np])==3 && ((hepg_ID[np]*hepg_IDparent[np]) < 0)){ptype = -3;}
            if(abs(hepg_ID[np])==4 && ((hepg_ID[np]*hepg_IDparent[np]) > 0)){ptype = 4;}
            if(abs(hepg_ID[np])==4 && ((hepg_ID[np]*hepg_IDparent[np]) < 0)){ptype = -4;}
            if(abs(hepg_ID[np])==21){ptype = 0;}
            if(abs(hepg_ID[np])==5) cout << " Found a b quark in proton: particle " << np << endl;
            // cout << "pdf for particle " << np << " dtyp " << hepg_ID[np] << " with x is " << partx
            //      << " pdf is " << cteq6->Ctq6pdf(ptype,partx,100.) << endl;
            if(npdfpartons > 2) cout << " too many initial state partons " << endl;

            // for pdflib x*f(x) is returned for everything
            if(index < 3)
            {
               py->Structm(partx,Qscale,uval,dval,usea,dsea,str,chm,bot,top,glu);
               if(ptype==2)      pdfval = (dval+dsea)/partx;
               if(ptype==-2)     pdfval = (dsea)/partx;
               if(ptype==1)      pdfval = (uval+usea)/partx;
               if(ptype==-1)     pdfval = (usea)/partx;
               if(abs(ptype)==3) pdfval = (str)/partx;
               if(abs(ptype)==4) pdfval = (chm)/partx;
               if(abs(ptype)==5) pdfval = (bot)/partx;
               if(ptype==0)      pdfval = (glu)/partx;
            }
            if(index > 2)
            {
               pdfval = cteq6->Ctq6pdf(ptype,partx,Qscale);
            }
            tot_pdfwt *= pdfval;
            if(tot_pdfwt > 50.) cout << "Index " << index << " Total pdf wt " << tot_pdfwt
                                     << " parton type " << ptype << " part x " << partx << endl;
         }
      }
      // cout << "Total pdf wt " << tot_pdfwt << endl;
      Eventpdfwt[index][jentry] = tot_pdfwt;
   }
}
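
/****************
 Usage sketch (an assumption about the surrounding setup, not taken from this
 file): TopTree.h/.C follow the usual TTree::MakeClass pattern, and the
 Electroweak cteq6/pythia wrapper libraries must already be loaded in the
 ROOT session.

   root [0] .L TopTree.C+
   root [1] TopTree t;   // attaches the ntuple named in TopTree.h
   root [2] t.Loop();    // fills Eventpdfwt via get_pdf(), loops over the events,
                         // and writes Pdferr_hists.root
****************/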