diff --git a/Utilities/CMakeLists.txt b/Utilities/CMakeLists.txt
index 440514c381c6a..1028a895cfe52 100644
--- a/Utilities/CMakeLists.txt
+++ b/Utilities/CMakeLists.txt
@@ -8,11 +8,6 @@
 # granted to it by virtue of its status as an Intergovernmental Organization or
 # submit itself to any jurisdiction.
 
-if(ALIROOT)
-  # FIXME: not tested
-  add_subdirectory(hough)
-endif(ALIROOT)
-
 add_subdirectory(aliceHLTwrapper)
 add_subdirectory(O2MessageMonitor)
 add_subdirectory(DataFlow)
diff --git a/Utilities/hough/CMakeLists.txt b/Utilities/hough/CMakeLists.txt
deleted file mode 100644
index 12c3adcc99d92..0000000000000
--- a/Utilities/hough/CMakeLists.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright CERN and copyright holders of ALICE O2. This software is distributed
-# under the terms of the GNU General Public License v3 (GPL Version 3), copied
-# verbatim in the file "COPYING".
-#
-# See http://alice-o2.web.cern.ch/license for full licensing information.
-#
-# In applying this license CERN does not waive the privileges and immunities
-# granted to it by virtue of its status as an Intergovernmental Organization or
-# submit itself to any jurisdiction.
-
-set(MODULE_NAME "hough")
-
-o2_setup(NAME ${MODULE_NAME})
-
-set(Exe_Names runHough)
-
-set(Exe_Source runHough.cxx)
-
-set(BUCKET_NAME hough_bucket)
-
-list(LENGTH Exe_Names _length)
-math(EXPR _length ${_length}-1)
-
-foreach(_file RANGE 0 ${_length})
-  list(GET Exe_Names ${_file} _name)
-  list(GET Exe_Source ${_file} _src)
-  o2_generate_executable(EXE_NAME
-                         ${_name}
-                         SOURCES
-                         ${_src}
-                         BUCKET_NAME
-                         ${BUCKET_NAME})
-endforeach(_file RANGE 0 ${_length})
diff --git a/Utilities/hough/README.md b/Utilities/hough/README.md
deleted file mode 100644
index 3d8f44e8ccb43..0000000000000
--- a/Utilities/hough/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-
-
-# Hough Transform
-
-This is the groundwork for the Hough Transform algorithm implementation. The runHough executable takes as an argument an event number (i.e. runHough 032) and for the given event it loads all clusters from the corresponding data files.
-
-### Step by Step installation and execution
-
-1. The runHough executable depends on the AliRoot HLT libraries. For this, an optional dependency to AliRoot has been added to the AliceO2 framework. To build the executable, the path to the AliRoot installation must be given at configuration time. For example:
-
-    cmake -DCMAKE_INSTALL_PREFIX:PATH=.. -DCMAKE_CXX_FLAGS="-std=c++11" .. -DALIROOT="/opt/alice/external/AliRoot"
-
-It is important that AliRoot, FairRoot and AliceO2 have been built against the same version of ROOT. To ensure that the prerequisite packages were compiled and installed correctly, the alfaconfig.sh script that is included as part of the FairSoft installation can be used.
-
-2. Raw data files should be retrieved from AliEn for a given run. The implementation can be tested using the raw files corresponding to run 167808:
-
-    alien-token-init
-    aliensh
-
-    cd /alice/data/2011/LHC11h/000167808/raw/
-    cp 11000167808000.10.root file://tmp
-    exit
-
-    mv /tmp/11000167808000.10.root raw.root
-
-Then, the necessary scripts to perform the clusterization for the data should be copied to the current directory:
-
-    cp ${AliRoot}/HLT/exa/recraw-local.C .
-    cp ${AliRoot}/HLT/exa/EnableHLTInGRP.C .
-    cp ${AliceO2}/devices/aliceHLTwrapper/macros/hltConfigurations.C .
-
-Finally, the following commands should be executed:
-
-    aliroot -b -q -l hltConfigurations.C recraw-local.C'("raw.root", "raw://", 0, 0, "HLT TPC", "loglevel=0x79 chains=cluster-collection", "local://./OCDB")'
-
-    aliroot -b -q -l EnableHLTInGRP.C'(167808, "local://./OCDB", "local://./OCDB")'
-    rm galice.root QA.root
-    aliroot -b -q -l hltConfigurations.C recraw-local.C'("raw.root", "local://OCDB", -1, -1, "HLT", "loglevel=0x79 chains=cluster-collection")' 2>&1 | tee cluster-collection.log
-
-The result should be a directory called "emulated-tpc-clusters", which will be the input of runHough. By executing
-
-    runHough 032
-
-the executable will load all the clusters from the emulated-tpc-clusters/event032 subdirectory. After the execution, a graphics file "clusters.pdf" will be created in the current directory depicting the coordinates of the loaded clusters.
diff --git a/Utilities/hough/runHough.cxx b/Utilities/hough/runHough.cxx
deleted file mode 100644
index 5c767e98bcf60..0000000000000
--- a/Utilities/hough/runHough.cxx
+++ /dev/null
@@ -1,180 +0,0 @@
-// Copyright CERN and copyright holders of ALICE O2. This software is
-// distributed under the terms of the GNU General Public License v3 (GPL
-// Version 3), copied verbatim in the file "COPYING".
-//
-// See http://alice-o2.web.cern.ch/license for full licensing information.
-//
-// In applying this license CERN does not waive the privileges and immunities
-// granted to it by virtue of its status as an Intergovernmental Organization
-// or submit itself to any jurisdiction.
-
-/// \file runHough.cxx
-/// \brief Implementation of a cluster loader
-/// \author Charis Kouzinopoulos
-
-#include "AliHLTTPCTrackGeometry.h"
-#include "AliHLTTPCClusterDataFormat.h"
-#include "AliHLTTPCSpacePointContainer.h"
-#include "AliHLTComponent.h"
-#include "AliHLTTPCDefinitions.h"
-
-#include "TCanvas.h"
-#include "TGraph2D.h"
-
-#include "boost/filesystem.hpp"
-
-#include <fstream>
-
-std::unique_ptr<AliHLTTPCSpacePointContainer> spacepoints;
-vector<float> clusterCoordinates;
-
-void drawData(int totalNumberOfClusters, std::string dataFilename)
-{
-  TCanvas* c1 = new TCanvas("c1", dataFilename.c_str(), 0, 0, 800, 600);
-  TGraph2D* dt = new TGraph2D(10000);
-
-  for (Int_t i = 0; i < totalNumberOfClusters; i++) {
-    dt->SetPoint(i, clusterCoordinates[i * 4 + 1], clusterCoordinates[i * 4 + 2], clusterCoordinates[i * 4 + 3]);
-  }
-
-  // Draw with colored dots
-  dt->SetMarkerStyle(1);
-  dt->Draw("pcol");
-
-  c1->Print("clusters.pdf");
-}
-
-void printData(int totalNumberOfClusters)
-{
-  cout << "Cluster ID" << setw(13) << "X coordinate" << setw(13) << "Y coordinate" << setw(13) << "Z coordinate"
-       << endl;
-
-  for (int i = 0; i < totalNumberOfClusters; i++) {
-    cout << (AliHLTUInt32_t)clusterCoordinates[i * 4] << setw(13) << clusterCoordinates[i * 4 + 1] << setw(13)
-         << clusterCoordinates[i * 4 + 2] << setw(13) << clusterCoordinates[i * 4 + 3] << endl;
-  }
-}
-
-void addDataToCoordinatesVector(AliHLTUInt32_t clusterID, float XCoordinate, float YCoordinate, float ZCoordinate)
-{
-  clusterCoordinates.push_back((float)clusterID);
-  clusterCoordinates.push_back(XCoordinate);
-  clusterCoordinates.push_back(YCoordinate);
-  clusterCoordinates.push_back(ZCoordinate);
-}
-
-int processData(std::string dataPath, std::string dataType, std::string dataOrigin)
-{
-  // Open data file for reading
-  std::ifstream inputData(dataPath.c_str(), std::ifstream::binary);
-  if (!inputData) {
-    std::cerr << "Error, cluster data file " << dataPath << " could not be accessed" << endl;
-    std::exit(1);
-  }
-
-  // Get length of file
-  inputData.seekg(0, inputData.end);
-  int dataLength = inputData.tellg();
-  inputData.seekg(0, inputData.beg);
-
-  // Allocate memory and read file to memory
-  char* inputBuffer = new char[dataLength];
-  inputData.read(inputBuffer, dataLength);
-  inputData.close();
-
-  // Retrieve the TPC slice and partition from the filename
-  std::string currentSliceString(dataPath, dataPath.length() - 6, 2);
-  std::string currentPartitionString(dataPath, dataPath.length() - 2, 2);
-
-  AliHLTUInt8_t currentSlice = std::stoul(currentSliceString, nullptr, 16);
-  AliHLTUInt8_t currentPartition = std::stoul(currentPartitionString, nullptr, 16);
-
-  // Initialize a cluster point collection
-  spacepoints = std::unique_ptr<AliHLTTPCSpacePointContainer>(new AliHLTTPCSpacePointContainer);
-  if (!spacepoints.get()) {
-    std::cerr << "Error, could not create a space point collection" << endl;
-    std::exit(1);
-  }
-
-  // Create an AliHLTComponentBlockData object, fill it with default values and then set its pointer to the data buffer
-  AliHLTComponentBlockData bd;
-  AliHLTComponent::FillBlockData(bd);
-  bd.fPtr = inputBuffer;
-  bd.fSize = dataLength;
-  // bd.fDataType=kAliHLTVoidDataType;
-  AliHLTComponent::SetDataType(bd.fDataType, dataType.c_str(), dataOrigin.c_str());
-  bd.fSpecification = kAliHLTVoidDataSpec;
-
-  // Set slice and partition
-  AliHLTTPCDefinitions::EncodeDataSpecification(currentSlice, currentSlice, currentPartition, currentPartition);
-
-  // Add the AliHLTComponentBlockData object to AliHLTTPCSpacePointContainer
-  int numberOfClusters = spacepoints->AddInputBlock(&bd);
-
-  // cout << *spacepoints << endl;
-
-  // Retrieve the cluster information from AliHLTTPCSpacePointContainer
-  std::vector<AliHLTUInt32_t> clusterIDs;
-  spacepoints->GetClusterIDs(clusterIDs);
-
-  // Append the cluster IDs and their X, Y and Z coordinates to the clusterCoordinates vector
-  for (vector<AliHLTUInt32_t>::const_iterator element = clusterIDs.begin(); element != clusterIDs.end(); element++) {
-    AliHLTUInt32_t clusterID = *element;
-
-    addDataToCoordinatesVector(clusterID, spacepoints->GetX(clusterID), spacepoints->GetY(clusterID),
-                               spacepoints->GetZ(clusterID));
-  }
-
-  // De-allocate memory space
-  if (inputBuffer) {
-    delete[] inputBuffer;
-  }
-  inputBuffer = NULL;
-
-  return numberOfClusters;
-}
-
-int main(int argc, char** argv)
-{
-  if (argc != 2) {
-    std::cerr << "Usage: " << argv[0] << " <event number>" << endl;
-    std::exit(1);
-  }
-
-  // Create data path
-  std::string dataFilename = "emulated-tpc-clusters/event";
-  dataFilename += argv[1];
-
-  boost::filesystem::path dataPath(dataFilename);
-  boost::filesystem::directory_iterator endIterator;
-
-  typedef std::multimap<std::time_t, boost::filesystem::path> result_set_t;
-  result_set_t result_set;
-
-  std::string dataType = "CLUSTERS", dataOrigin = "TPC ";
-
-  int totalNumberOfClusters = 0, totalNumberOfDataFiles = 0;
-
-  // Traverse the filesystem and execute processData for each cluster file found
-  if (boost::filesystem::exists(dataPath) && boost::filesystem::is_directory(dataPath)) {
-    for (boost::filesystem::directory_iterator directoryIterator(dataPath); directoryIterator != endIterator;
-         ++directoryIterator) {
-      if (boost::filesystem::is_regular_file(directoryIterator->status())) {
-        totalNumberOfClusters += processData(directoryIterator->path().string(), dataType, dataOrigin);
-        totalNumberOfDataFiles++;
-      }
-    }
-  } else {
-    std::cerr << "Path " << dataPath.string() << "/ could not be found or does not contain any valid data files"
-              << endl;
-    exit(1);
-  }
-
-  cout << "Added " << totalNumberOfClusters << " clusters from " << totalNumberOfDataFiles << " data files" << endl;
-
-  // printData(totalNumberOfClusters);
-
-  drawData(totalNumberOfClusters, dataFilename);
-
-  return 0;
-}
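
For orientation, the removed runHough.cxx amounts to a per-event directory scan, a binary read of each cluster dump, and an AliHLT decoding step. Below is a minimal standalone sketch of just the scanning and bookkeeping side, under stated assumptions: C++17 std::filesystem stands in for the original boost::filesystem, the AliHLTTPCSpacePointContainer decoding is omitted because it requires the AliRoot HLT libraries, and the names loadClusterFile and eventDir are illustrative, not taken from the removed code. The emulated-tpc-clusters/event<N> layout and the two trailing hexadecimal filename fields for slice and partition are the conventions the removed code relied on.

    #include <cstddef>
    #include <filesystem>
    #include <fstream>
    #include <iostream>
    #include <iterator>
    #include <string>
    #include <vector>

    // Read one cluster dump into memory and report its size. Decoding the
    // AliHLT block into space points is intentionally left out here.
    std::size_t loadClusterFile(const std::filesystem::path& dataPath)
    {
      const std::string name = dataPath.string();
      if (name.length() < 6) {
        return 0; // too short to carry the slice/partition suffix
      }
      // Assumes the original filename convention: the last two two-character
      // hexadecimal fields encode the TPC slice and partition.
      const unsigned slice = std::stoul(name.substr(name.length() - 6, 2), nullptr, 16);
      const unsigned partition = std::stoul(name.substr(name.length() - 2, 2), nullptr, 16);

      std::ifstream input(dataPath, std::ifstream::binary);
      if (!input) {
        std::cerr << "Error, cluster data file " << name << " could not be accessed\n";
        return 0;
      }
      std::vector<char> buffer((std::istreambuf_iterator<char>(input)), std::istreambuf_iterator<char>());
      std::cout << "slice " << slice << " partition " << partition << ": " << buffer.size() << " bytes\n";
      return buffer.size();
    }

    int main(int argc, char** argv)
    {
      if (argc != 2) {
        std::cerr << "Usage: " << argv[0] << " <event number>\n";
        return 1;
      }
      // Mirror the removed tool's input layout: emulated-tpc-clusters/event<N>
      const std::filesystem::path eventDir =
          std::filesystem::path("emulated-tpc-clusters") / (std::string("event") + argv[1]);
      if (!std::filesystem::is_directory(eventDir)) {
        std::cerr << "Path " << eventDir.string() << " could not be found\n";
        return 1;
      }
      std::size_t dataFiles = 0;
      for (const auto& entry : std::filesystem::directory_iterator(eventDir)) {
        if (entry.is_regular_file()) {
          loadClusterFile(entry.path());
          ++dataFiles;
        }
      }
      std::cout << "Scanned " << dataFiles << " data files\n";
      return 0;
    }

Invoked as, for example, "./scanClusters 032", the sketch walks the same per-event directory the removed executable consumed; anything beyond counting bytes (cluster IDs, X/Y/Z coordinates, plotting) would need the AliHLT data format handling that this patch deletes.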