///////////////////////////////////////////////////////////////////////////////
//
// $Id: doFlowSumAll.C,v 1.6 2011/07/25 15:54:50 posk Exp $
//
// Makes root.files.<cenNo> files containing lists of flow.hist.root files
// in the specified subdirectory/link of outDir (which is a link in this directory).
// Adds the histograms together for all files for all specified centralities.
// The cenNo is actually the baseRunNo + the centrality.
// First adds all histograms, then, for some, does it again as weighted averages;
// for LYZ the error also includes the spread between the different batch jobs.
// Therefore, for LYZ, do only one centrality at a time.
// The last file is the output file.
// This macro must be in your working directory.
// If running just the AnalysisMaker, set nNames = 6.
//
// by Art Poskanzer and Kirill Filimonov
//
///////////////////////////////////////////////////////////////////////////////
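// Example invocation (the centrality numbers and subdirectory name below are
// only illustrative; adjust them to your own outDir layout):
//
//   root -b -q 'doFlowSumAll.C(10, 13, "central", 99)'
//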
#include <iostream>
#include <fstream>
#include <cstring>  // strstr, strcmp
#include <cstdio>   // sprintf
#include <cstdlib>  // system
#include <cmath>    // sqrt, pow
#include "TObject.h"
#include "TH1.h"
#include "TH2.h"
#include "TFile.h"
#include "TKey.h"
#include "TChain.h"
#include "TList.h"    // TIter over the file's keys
#include "TString.h"
using namespace std;
void doFlowSumAll(Int_t firstCenNo, Int_t lastCenNo, const char* dirName = "", Int_t outputRunNo = 99) {
  const int nSels = 2;
  const int nHars = 4; // 4
  bool LYZ = kFALSE;
  bool reCent = kFALSE;
  char rootFileName[80];
  char logFileName[80];
  char listFileName[80];
  char rootOutName[80];
  char logOutName[80];
  char logTitle[80];
  char lsCommand[80];
  char cpCommand[80];
  char logTitleCommand[80];
  char logCommand[80];
  char firstPassCommand[80];
  char secondPassCommand[80];
  char rmCommand[80];
  TFile* histFile[1000];
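  // j01 is the first zero of the Bessel function J0; it enters the LYZ relation
  // r0 = j01 / Vtheta ("Eq. 9") used near the end of this macro.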
  Float_t j01 = 2.405;
  // names of histograms to be added with weighting (not including Obs hists)
  const char* baseName[] = {
    "Flow_Res_",
    "Flow_v2D_", // must be before vEta and vPt
    "Flow_vEta_",
    "Flow_vPt_",
    "Flow_v_",
    "Flow_q_",
    "Flow_v2D_ScalarProd_",
    "Flow_vEta_ScalarProd_",
    "Flow_vPt_ScalarProd_",
    "Flow_v_ScalarProd_",
    "Flow_Cumul_vEta_Order2_",
    "Flow_Cumul_vPt_Order2_",
    "Flow_Cumul_v_Order2_",
    "Flow_Cumul_vEta_Order4_",
    "Flow_Cumul_vPt_Order4_",
    "Flow_Cumul_v_Order4_",
    "FlowLYZ_r0theta_",
    "FlowLYZ_Vtheta_", // must come after r0theta
    "FlowLYZ_V_",
    "FlowLYZ_vr0_",
    "FlowLYZ_vEta_",
    "FlowLYZ_vPt_",
    "FlowLYZ_v_",
    "FlowLYZ_Gtheta0_",
    "FlowLYZ_Gtheta1_",
    "FlowLYZ_Gtheta2_",
    "FlowLYZ_Gtheta3_",
    "FlowLYZ_Gtheta4_"
  };
  //const int nNames = sizeof(baseName) / sizeof(char*);
  const int nNames = 6; // if just AnalysisMaker
  // open the output log file
  sprintf(logOutName, "ana%2d.log", outputRunNo);
  sprintf(logTitle, " Combination of centralities %2d to %2d", firstCenNo, lastCenNo);
  sprintf(logTitleCommand, "echo %s > %s", logTitle, logOutName);
  system(logTitleCommand);
  // open the input files
  Int_t nFile = 0;
  for (int nc = firstCenNo; nc <= lastCenNo; nc++) {
    sprintf(listFileName, "root.files.%2d", nc);
    //sprintf(lsCommand, "ls -1 outDir/*-%2d/flow.hist.root > %s", nc, listFileName);
    sprintf(lsCommand, "ls -1 outDir/%s*-%2d/flow.hist.root > %s", dirName, nc, listFileName);
    system(lsCommand);
    sprintf(cpCommand, "cat %s >> %s", listFileName, logOutName);
    system(cpCommand);
    fstream ascii_in;
    ascii_in.open(listFileName, ios::in);
    while (ascii_in >> rootFileName) { // read until the stream fails at end of file
      if (strstr(rootFileName, ".root") == 0) continue;
      histFile[nFile] = new TFile(rootFileName);
      char* cp = strstr(rootFileName, "flow.");
      if (cp) *cp = '\0'; // truncate to directory name
      sprintf(firstPassCommand, "test -f %sflowPhiWgtNew.hist.root", rootFileName);
      sprintf(secondPassCommand, "test -f %sflowPhiWgt.hist.root", rootFileName);
      if (LYZ) sprintf(secondPassCommand, "test -f %sflow.firstPassLYZNew.root", rootFileName);
      if (reCent) {
        sprintf(firstPassCommand, "test -f %sflow.reCentAnaNew.root", rootFileName);
        sprintf(secondPassCommand, "test -f %sflow.reCentAna.root", rootFileName);
      }
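      // system() returns 0 when "test -f" finds the file, so a job is skipped
      // only if neither the first-pass nor the second-pass file exists.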
      if (system(firstPassCommand) && system(secondPassCommand)) {
        cout << "####################################" << endl;
        cout << "### No 2nd pass for " << rootFileName << endl;
        cout << "####################################" << endl;
        delete histFile[nFile];
        continue;
      }
      cout << nFile+1 << " directory name = " << rootFileName << endl;
      sprintf(logFileName, "%sana.log", rootFileName);
      sprintf(logCommand, "cat %s >> %s", logFileName, logOutName);
      system(logCommand);
      nFile++;
    }
  }
  const Int_t nFiles = nFile;
  cout << endl;
  cout << "input files: " << nFiles << endl;
  if (!nFiles) {
    cout << "#### no files" << endl;
    return;
  }
  TH2* yieldPartHist[nFiles];
  TH1* hist[nFiles+1];
  TH1* yieldPartHistPt[nFiles];
  TH1* yieldPartHistEta[nFiles];
  Float_t yieldTotal[nFiles];
  TH1* hist_r0theta[nSels][nHars];
  // open the output file
  sprintf(rootOutName, "ana%2d.root", outputRunNo);
  cout << " output file name = " << rootOutName << endl;
  histFile[nFiles] = new TFile(rootOutName, "RECREATE"); // last file is new out file
  cout << " log file name = " << logOutName << endl << endl;
  // add all histograms with no weighting
  // iterate over the histograms in file[0] and add all the others to it
  // write output to file[nFiles]
  TKey* key;
  TObject* obj;
  const char* objName = 0;
  TIter nextkey(histFile[0]->GetListOfKeys());
  while ((key = (TKey*)nextkey())) {
    histFile[0]->cd();
    //key->ls();
    obj = key->ReadObj();
    if (obj->InheritsFrom("TH1")) { // TH1 or TProfile
      hist[0] = (TH1*)obj;
      objName = key->GetName();
      cout << " hist name= " << objName << endl;
      for (int n = 1; n < nFiles; n++) {
        hist[1] = (TH1*)histFile[n]->Get(objName);
        if (hist[1]) hist[0]->Add(hist[1]); // skip files missing this histogram
      }
    }
    histFile[nFiles]->cd();
    obj->Write(objName);
    delete obj;
    obj = NULL;
  }
  // get yield histograms
  for (int n = 0; n < nFiles; n++) {
    yieldPartHist[n] = dynamic_cast<TH2*>(histFile[n]->Get("Flow_YieldPart2D"));
    yieldPartHistPt[n] = dynamic_cast<TH1*>(histFile[n]->Get("FlowLYZ_YieldPartPt"));
    yieldPartHistEta[n] = dynamic_cast<TH1*>(histFile[n]->Get("FlowLYZ_YieldPartEta"));
    yieldTotal[n] = yieldPartHistPt[n] ? yieldPartHistPt[n]->Integral() : 0.;
  }
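  // Second pass: the first nNames entries of baseName are re-averaged with
  // weights; the re-averaged versions replace the straight sums written above
  // when the output file is written with kOverwrite at the end of the macro.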
  cout << endl << "with weighting" << endl;
  TH1F* yieldY;
  TH1F* yieldPt;
  for (int pageNumber = 0; pageNumber < nNames; pageNumber++) {
  nextPage:
    bool twoD = kFALSE;
    if (strstr(baseName[pageNumber], "v2D")) twoD = kTRUE;
    for (int selN = 0; selN < nSels; selN++) {
      // no harmonics
      bool noHars = kFALSE;
      int nHar = nHars;
      if (strstr(baseName[pageNumber], "_v_") != 0 ||
          strcmp(baseName[pageNumber], "Flow_Cos_") == 0 ||
          strstr(baseName[pageNumber], "Res") != 0 ||
          strstr(baseName[pageNumber], "_V_") != 0 ||
          strstr(baseName[pageNumber], "_vr0_") != 0) {
        noHars = kTRUE;
        nHar = 1;
      }
      // two harmonics
      if (strstr(baseName[pageNumber], "theta_") != 0) {
        nHar = 2;
      }
      for (int harN = 0; harN < nHar; harN++) {
        // construct histName
        TString histName(baseName[pageNumber]);
        histName += "Sel";
        histName += selN+1;
        if (!noHars) {
          histName += "_Har";
          histName += harN+1;
        }
        if (strstr(histName.Data(), "_Gtheta") && harN > 1) { continue; }
        // get the histograms
        for (int n = 0; n < nFiles+1; n++) {
          hist[n] = dynamic_cast<TH1*>(histFile[n]->Get(histName));
          if (!hist[n]) {
            if (pageNumber < nNames - 1) { // not the last name: go on to the next one
              pageNumber++;
              goto nextPage;
            } else {
              cout << endl;
              cout << "*** Error: Correct nNames in macro" << endl;
            }
          } else if (n == 0) {
            cout << " hist name= " << histName << endl;
          }
        }
        // get the r0theta output hist
        if (strstr(histName.Data(), "r0theta")) {
          hist_r0theta[selN][harN] = dynamic_cast<TH1*>(histFile[nFiles]->Get(histName));
        }
        // get the number of bins for this hist
        if (!hist[0]) { continue; }
        int nBins; // set by 2D
        int xBins = hist[0]->GetNbinsX();
        int yBins = 0;
        if (twoD) {
          yBins = hist[0]->GetNbinsY();
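          // ROOT global bin numbering for a 2D histogram: bin = binx + (nbinsx+2)*biny,
          // where the +2 counts the under/overflow bins; the largest in-range global
          // bin is therefore xBins + (xBins+2)*yBins.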
          nBins = xBins + (xBins + 2) * yBins;
          float yMax = hist[0]->GetXaxis()->GetXmax();
          float yMin = hist[0]->GetXaxis()->GetXmin();
          yieldY = new TH1F("Yield_Y", "Yield_Y", xBins, yMin, yMax);
          yieldY->SetXTitle("Rapidity");
          yieldY->SetYTitle("Counts");
          float ptMax = hist[0]->GetYaxis()->GetXmax();
          yieldPt = new TH1F("Yield_Pt", "Yield_Pt", yBins, 0., ptMax);
          yieldPt->SetXTitle("Pt (GeV/c)");
          yieldPt->SetYTitle("Counts");
        } else {
          nBins = xBins + 2;
        }
        // loop over the bins
        if (strstr(baseName[pageNumber], "LYZ")) {
          cout << " with LYZ yield weighted averaging" << endl;
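          // Yield-weighted average over the batch jobs: <v> = sum(y_i v_i) / sum(y_i).
          // The error adds in quadrature the propagated statistical term (error2Sum)
          // and the job-to-job spread of v divided by the effective number of
          // independent jobs minus one, 1/yield2Sum - 1 = (sum y_i)^2 / sum(y_i^2) - 1.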
          double v, yield, content, error, Vtheta, VthetaErr, r0, r0Err;
          double vSum, v2Sum, error2Sum, yieldSum, yieldSum2, yield2Sum;
          //for (int bin = 0; bin < nBins; bin++) {
          for (int bin = 1; bin < nBins-1; bin++) {
            v = 0.;
            vSum = 0.;
            v2Sum = 0.;
            content = 0.;
            error = 0.;
            error2Sum = 0.;
            yield = 0.;
            yieldSum = 0.;
            yield2Sum = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                if (strstr(histName.Data(), "vEta")) {
                  yield = yieldPartHistEta[n]->GetBinContent(bin);
                } else if (strstr(histName.Data(), "vPt")) {
                  yield = yieldPartHistPt[n]->GetBinContent(bin);
                } else { // r0theta, Vtheta, _V_, vr0, _v_, Gtheta
                  yield = yieldTotal[n];
                }
                v = hist[n]->GetBinContent(bin);
                if (v != 0 && yield > 1.) { // error is wrong for one count
                  yieldSum += yield;
                  yield2Sum += yield*yield;
                  vSum += yield * v;
                  v2Sum += yield * v*v;
                  error2Sum += pow(yield * hist[n]->GetBinError(bin), 2.);
                }
              }
            } // nFiles
            if (yieldSum) {
              content = vSum / yieldSum;
              v2Sum /= yieldSum;
              yieldSum2 = yieldSum*yieldSum;
              yield2Sum /= yieldSum2;
              error2Sum /= yieldSum2;
              if (strstr(baseName[pageNumber], "_G")) {
                error = 0.;
              } else if (nFiles > 1 && yield2Sum != 1.) {
                error = sqrt(error2Sum + (v2Sum - content*content) / (1/yield2Sum - 1));
              } else {
                error = sqrt(error2Sum); // only 1st term
              }
              if (strstr(histName.Data(), "v")) {
                // cout << histName << " 1st, 2nd: " << sqrt(error2Sum) << ", " << sqrt(error*error - error2Sum) << endl;
              }
              //error = sqrt(error2Sum); // only 1st term
            }
            hist[nFiles]->SetBinContent(bin, content);
            hist[nFiles]->SetBinError(bin, error);
            if (strstr(histName.Data(), "Vtheta")) {
              //cout << content << ", " << error2Sum << ", " << v2Sum << ", " << content*content << ", " << yield2Sum << ", " << error << endl;
              Vtheta = hist[nFiles]->GetBinContent(bin);
              VthetaErr = hist[nFiles]->GetBinError(bin);
              if (VthetaErr > 0.001) { // calculate r0theta from <Vtheta>
                //float perCentErr = Vtheta ? 100. * VthetaErr / Vtheta : 0.;
                //cout << "Vtheta= " << Vtheta << " +/- " << perCentErr << "%" << endl;
                r0 = Vtheta ? j01 / Vtheta : 0.; // Eq. 9
                r0Err = Vtheta ? r0 * VthetaErr / Vtheta : 0.;
                //cout << hist_r0theta[selN][harN]->GetBinContent(bin) << ", " << r0 << endl;
                hist_r0theta[selN][harN]->SetBinContent(bin, r0);
                hist_r0theta[selN][harN]->SetBinError(bin, r0Err);
                //cout << "r0= " << r0 << " +/- " << r0Err << endl;
              }
            }
          } // bins
        } else if ((strstr(baseName[pageNumber], "Cos")) ||
                   (strstr(baseName[pageNumber], "Res"))) { // with error weighting
          cout << " With error weighting" << endl;
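          // Inverse-variance weighting: each file's bin enters with weight 1/sigma^2,
          // giving mean = sum(x_i/sigma_i^2) / sum(1/sigma_i^2) and an error on the
          // mean of 1/sqrt(sum(1/sigma_i^2)).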
          double content, error, errorSq, meanContent, meanError, weight;
          for (int bin = 0; bin < nBins; bin++) {
            meanContent = 0.;
            meanError = 0.;
            weight = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                content = hist[n]->GetBinContent(bin);
                error = hist[n]->GetBinError(bin);
                errorSq = error * error;
                if (errorSq > 0.) {
                  meanContent += content / errorSq;
                  weight += 1. / errorSq;
                }
              }
            } // nFiles
            if (weight > 0.) {
              meanContent /= weight;
              meanError = sqrt(1. / weight);
              hist[nFiles]->SetBinContent(bin, meanContent);
              hist[nFiles]->SetBinError(bin, meanError);
            }
          } // bins
        } else { // with yield weighting
          cout << " with yield weighted averaging" << endl;
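          // Yield-weighted average over files: <v> = sum(y_i v_i) / sum(y_i) with
          // error sqrt(sum((y_i sigma_i)^2)) / sum(y_i); the yields come from each
          // file's Flow_YieldPart2D, integrated over the other axis for vEta and vPt.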
          double v, yield, content, error, y, pt;
          double vSum, error2sum, yieldSum;
          for (int bin = 0; bin < nBins; bin++) {
            v = 0.;
            vSum = 0.;
            content = 0.;
            error = 0.;
            error2sum = 0.;
            yield = 0.;
            yieldSum = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                if (strstr(histName.Data(), "v2D")) {
                  yield = yieldPartHist[n]->GetBinContent(bin);
                } else if (strstr(histName.Data(), "vEta")) {
                  yield = yieldPartHist[n]->Integral(bin, bin, 1, yBins);
                  if (selN==0 && harN==0) {
                    y = yieldPartHist[n]->GetXaxis()->GetBinCenter(bin);
                    yieldY->Fill(y, yield);
                  }
                } else if (strstr(histName.Data(), "vPt")) {
                  yield = yieldPartHist[n]->Integral(1, xBins, bin, bin);
                  if (selN==0 && harN==0) {
                    pt = yieldPartHist[n]->GetYaxis()->GetBinCenter(bin);
                    yieldPt->Fill(pt, yield);
                  }
                } else { // _v_ and _q_
                  yield = yieldPartHist[n]->Integral();
                }
                v = hist[n]->GetBinContent(bin);
                if (v != 0. && yield > 1.) { // error is wrong for one count
                  yieldSum += yield;
                  vSum += yield * v;
                  error2sum += pow(yield * hist[n]->GetBinError(bin), 2.);
                }
              }
            } // nFiles
            if (yieldSum) {
              content = vSum / yieldSum;
              error = sqrt(error2sum) / yieldSum;
            }
            hist[nFiles]->SetBinContent(bin, content);
            hist[nFiles]->SetBinError(bin, error);
          } // bin
        } // else
      } // harN
    } // selN
    cout << endl;
  } // pageNumber
  cout << endl;
  //histFile[nFiles]->ls();
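  // kOverwrite makes the re-averaged histograms replace the keys of the straight
  // sums written earlier to this file.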
  histFile[nFiles]->Write(0, TObject::kOverwrite);
  histFile[nFiles]->Close();
  delete histFile[nFiles];
  sprintf(rmCommand, "rm %s", listFileName);
  system(rmCommand);
}
///////////////////////////////////////////////////////////////////////////////
//
// $Log: doFlowSumAll.C,v $
// Revision 1.6 2011/07/25 15:54:50 posk
// Added correction for non-flatness of event plane.
//
// Revision 1.5 2011/03/10 18:56:34 posk
// Added histogram for laboratory azimuthal distribution of particles.
//
// Revision 1.4 2010/09/30 19:28:21 posk
// Instead of reversing the weight for negative pseudorapidity for odd harmonics,
// it is now done only for the first harmonic.
// Recentering is now done for all harmonics.
//
// Revision 1.3 2007/02/06 19:00:50 posk
// In Lee Yang Zeros method, introduced recentering of Q vector.
// Reactivated eta symmetry cut.
//
// Revision 1.1 2006/03/22 21:59:51 posk
// Macro and shell script to sum the outputs of the second pass.
//
//
///////////////////////////////////////////////////////////////////////////////