diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index db9ad01fa8..51bd6a7da6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -72,6 +72,7 @@ shared_for_alert_tests: coatjava:singularity: image: eicweb.phy.anl.gov:4567/containers/image_recipes/ubuntu_dind:latest + needs: ["coatjava_build"] tags: - silicon allow_failure: true @@ -82,4 +83,21 @@ coatjava:singularity: paths: - coatjava.sif +create-pages: + image: codecr.jlab.org/hallb/alert/coatjava/coatjava:development + script: + - export MAVEN_OPTS=" -Dfile.encoding=UTF-8" + - echo "${CI_COMMIT_REF_NAME}" + - ls -lrth + - mvn --version + - java --version + - ls -lrth + - pwd + - ls -lrth docs/javadoc + - ./build-javadocs.sh + - ls -lrth + - ls -lrth docs/ + - ls -lrth docs/javadoc + - mv docs/javadoc public + pages: true # specifies that this is a Pages job and publishes the default public directory diff --git a/build-javadocs.sh b/build-javadocs.sh index bfdfe87abd..4932f9ee9d 100755 --- a/build-javadocs.sh +++ b/build-javadocs.sh @@ -5,8 +5,10 @@ set -e +export MAVEN_OPTS=" -Dfile.encoding=UTF-8" + ##### generate documentation -mvn javadoc:javadoc -Ddoclint=none +mvn javadoc:javadoc -Ddoclint=none ##### collect documentation diff --git a/pom.xml b/pom.xml index a4d40c561c..684e11dd46 100644 --- a/pom.xml +++ b/pom.xml @@ -1,10 +1,16 @@ + + UTF-8 + UTF-8 + + 4.0.0 org.jlab.clas clas12 12.0.1t-SNAPSHOT pom + org.jlab.clas clas12rec diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/AI/Model.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/AI/Model.java index 3f196db93c..83813a8561 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/AI/Model.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/AI/Model.java @@ -16,6 +16,11 @@ import java.io.IOException; import java.nio.file.Paths; +/** Model of What. 
+ * + * + * \todo fix class name + */ public class Model { private ZooModel model; diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Cluster/ClusterFinder.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Cluster/ClusterFinder.java index 7a1421815a..87a4446b1c 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Cluster/ClusterFinder.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Cluster/ClusterFinder.java @@ -5,6 +5,11 @@ import java.util.ArrayList; import java.util.List; +/** ClusterFinder + * + * \todo description of what it does and how it works + * + */ public class ClusterFinder { private final ArrayList _AHDCClusters = new ArrayList<>(); @@ -55,6 +60,7 @@ private void find_associate_cluster(PreCluster precluster, List AHDC } public void findCluster(List AHDC_precluster_list) { + /// \todo parameters should be exposed int window = 30; int minimal_distance = 10; diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Distance/Distance.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Distance/Distance.java index 51b9a1bbe9..0d9d10ce87 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Distance/Distance.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/Distance/Distance.java @@ -7,6 +7,11 @@ import java.util.Arrays; import java.util.List; +/** Distance. + * + * \todo What is this class and what does it do? + * + */ public class Distance { private ArrayList _AHDCTracks; diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitJava.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitJava.java index 55a50a4c78..7f853b3747 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitJava.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitJava.java @@ -1,8 +1,14 @@ package org.jlab.rec.ahdc.HelixFit; - +/** Helix Fit. 
+ * + * This appears to be some code translation. + */ public class HelixFitJava { + /** \todo What does this method do + * \what does its name even mean? + */ void rwsmav(double r[], double a[], double v[], int n) { // Author: Martin Poppe. r[n] = a[n,n]*v[n] diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitObject.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitObject.java index 4a9ad42c90..3e8591a13b 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitObject.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/HelixFit/HelixFitObject.java @@ -1,6 +1,6 @@ package org.jlab.rec.ahdc.HelixFit; -/** +/** Helix track model. * * @author davidpayette */ diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/PreCluster/PreCluster.java b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/PreCluster/PreCluster.java index 77140769c8..0045641849 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/PreCluster/PreCluster.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/ahdc/PreCluster/PreCluster.java @@ -4,6 +4,11 @@ import java.util.ArrayList; +/** PreCluster. + * + * \todo What is a pre cluster? + * + */ public class PreCluster implements Comparable { private int _Id; diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/alert/banks/RecoBankWriter.java b/reconstruction/alert/src/main/java/org/jlab/rec/alert/banks/RecoBankWriter.java new file mode 100644 index 0000000000..4f19cf05b3 --- /dev/null +++ b/reconstruction/alert/src/main/java/org/jlab/rec/alert/banks/RecoBankWriter.java @@ -0,0 +1,81 @@ +package org.jlab.rec.alert.banks; + +import java.util.ArrayList; +import org.jlab.io.base.DataBank; +import org.jlab.io.base.DataEvent; +import org.jlab.rec.alert.projections.TrackProjection; + +/** + * The ALERT {@code RecoBankWriter} writes the banks needed for the ALERT + * reconstruction: track projections. 
+ * + * @author Noemie Pilleux + * @author Whit Armstrong + */ +public class RecoBankWriter { + + /** + * Writes the bank of track projections. + * + * @param event the {@link DataEvent} in which to add the bank + * @param projections the {@link ArrayList} of {@link TrackProjection} + * containing the track projection info to be added to the bank + * + * @return {@link DataBank} the bank with all the projected tracks in the + * event. + * + */ + public static DataBank fillProjectionsBank(DataEvent event, ArrayList projections) { + + DataBank bank = event.createBank("ALERT::Projections", projections.size()); + + if (bank == null) { + System.err.println("COULD NOT CREATE A ALERT::Projections BANK!!!!!!"); + return null; + } + for (int i = 0; i < projections.size(); i++) { + TrackProjection projection = projections.get(i); + bank.setShort("id", i, (short) (i + 1)); + bank.setShort("trackID", i, (short) projection.getTrackID()); + bank.setFloat("x_at_bar", i, (float) projection.getBarIntersect().x()); + bank.setFloat("y_at_bar", i, (float) projection.getBarIntersect().y()); + bank.setFloat("z_at_bar", i, (float) projection.getBarIntersect().z()); + bank.setFloat("L_at_bar", i, (float) projection.getBarPathLength()); + bank.setFloat("L_in_bar", i, (float) projection.getBarInPathLength()); + bank.setFloat("x_at_wedge", i, (float) projection.getWedgeIntersect().x()); + bank.setFloat("y_at_wedge", i, (float) projection.getWedgeIntersect().y()); + bank.setFloat("z_at_wedge", i, (float) projection.getWedgeIntersect().z()); + bank.setFloat("L_at_wedge", i, (float) projection.getWedgePathLength()); + bank.setFloat("L_in_wedge", i, (float) projection.getWedgeInPathLength()); + } + return bank; + } + + /** + * Appends the alert match banks to an event. 
+ * + * @param event the {@link DataEvent} in which to append the banks + * @param projections the {@link ArrayList} of {@link TrackProjection} containing the + * track projections info to be added + * + * @return 0 if it worked, 1 if it failed + * + */ + public int appendMatchBanks(DataEvent event, ArrayList projections) { + + DataBank projbank = this.fillProjectionsBank(event, projections); + if (projbank != null) { + event.appendBank(projbank); + } else { + return 1; + } + return 0; + } + + /** + * @param args the command line arguments + */ + public static void main(String[] args) { + } + +} diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/alert/projections/TrackProjector.java b/reconstruction/alert/src/main/java/org/jlab/rec/alert/projections/TrackProjector.java index 4807be813f..2daab87097 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/alert/projections/TrackProjector.java +++ b/reconstruction/alert/src/main/java/org/jlab/rec/alert/projections/TrackProjector.java @@ -91,49 +91,51 @@ public void projectTracks(DataEvent event) {//, CalibrationConstantsLoader ccdb) projections.clear(); String track_bank_name = "AHDC::Track"; - if (event == null) { // check if there is an event - //System.out.print(" no event \n"); - } else if (event.hasBank(track_bank_name) == false) { - // check if there are ahdc tracks in the event - //System.out.print("no tracks \n"); - } else { - DataBank bank = event.getBank(track_bank_name); - int nt = bank.rows(); // number of tracks - TrackProjection projection = new TrackProjection(); - for (int i = 0; i < nt; i++) { - double x = bank.getFloat("x", i); - double y = bank.getFloat("y", i); - double z = bank.getFloat("z", i); - double px = bank.getFloat("px", i); - double py = bank.getFloat("py", i); - double pz = bank.getFloat("pz", i); - int id = nt + 1;//To be replaced by track id if it is added to the out bank - - int q = -1; //need the charge sign from tracking - - Units units = Units.MM; //can be MM or CM. 
- - double xb = 0; - double yb = 0; - - //momenta must be in GeV for the helix class - Helix helix = new Helix(x, y, z, px / 1000., py / 1000., pz / 1000., q, b, xb, yb, units); - - //Intersection points with the middle of the bar or wedge - projection.setBarIntersect(helix.getHelixPointAtR(Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgeIntersect(helix.getHelixPointAtR(Parameters.WEDGE_MIDDLE_RADIUS)); - - double rVertex = Math.sqrt(x * x + y * y); - - //Path length to the middle of the bar or wedge - projection.setBarPathLength((float) helix.getPathLength(rVertex, Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgePathLength((float) helix.getPathLength(rVertex, Parameters.WEDGE_MIDDLE_RADIUS)); - //Path length from the inner radius to the middle radius - projection.setBarInPathLength((float) helix.getPathLength(Parameters.BAR_INNER_RADIUS, Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgeInPathLength((float) helix.getPathLength(Parameters.WEDGE_INNER_RADIUS, Parameters.WEDGE_MIDDLE_RADIUS)); - projection.setTrackID(id); - projections.add(projection); - } + + // check if there is an event + if (event == null) return; + //System.out.print(" no event \n"); + + // check if there are ahdc tracks in the event + if (event.hasBank(track_bank_name) == false) return; + //System.out.print("no tracks \n"); + + DataBank bank = event.getBank(track_bank_name); + int nt = bank.rows(); // number of tracks + TrackProjection projection = new TrackProjection(); + for (int i = 0; i < nt; i++) { + double x = bank.getFloat("x", i); + double y = bank.getFloat("y", i); + double z = bank.getFloat("z", i); + double px = bank.getFloat("px", i); + double py = bank.getFloat("py", i); + double pz = bank.getFloat("pz", i); + int id = nt + 1;//To be replaced by track id if it is added to the out bank + + int q = -1; //need the charge sign from tracking + + Units units = Units.MM; //can be MM or CM. 
+ + double xb = 0; + double yb = 0; + + //momenta must be in GeV for the helix class + Helix helix = new Helix(x, y, z, px / 1000., py / 1000., pz / 1000., q, b, xb, yb, units); + + //Intersection points with the middle of the bar or wedge + projection.setBarIntersect(helix.getHelixPointAtR(Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgeIntersect(helix.getHelixPointAtR(Parameters.WEDGE_MIDDLE_RADIUS)); + + double rVertex = Math.sqrt(x * x + y * y); + + //Path length to the middle of the bar or wedge + projection.setBarPathLength((float) helix.getPathLength(rVertex, Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgePathLength((float) helix.getPathLength(rVertex, Parameters.WEDGE_MIDDLE_RADIUS)); + //Path length from the inner radius to the middle radius + projection.setBarInPathLength((float) helix.getPathLength(Parameters.BAR_INNER_RADIUS, Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgeInPathLength((float) helix.getPathLength(Parameters.WEDGE_INNER_RADIUS, Parameters.WEDGE_MIDDLE_RADIUS)); + projection.setTrackID(id); + projections.add(projection); } } @@ -148,56 +150,56 @@ public void projectMCTracks(DataEvent event) {//, CalibrationConstantsLoader ccd projections.clear(); String track_bank_name = "MC::Particle"; - if (event == null) { // check if there is an event - //System.out.print(" no event \n"); - } else if (event.hasBank(track_bank_name) == false) { - // check if there are ahdc tracks in the event - //System.out.print("no tracks \n"); - } else { - DataBank bank = event.getBank(track_bank_name); - int nt = bank.rows(); // number of tracks - TrackProjection projection = new TrackProjection(); - - for (int i = 0; i < nt; i++) { - - double x = bank.getFloat("vx", i); - double y = bank.getFloat("vy", i); - double z = bank.getFloat("vz", i); - double px = bank.getFloat("px", i); - double py = bank.getFloat("py", i); - double pz = bank.getFloat("pz", i); - int id = bank.getShort("id", i); - //Put everything in MM - x = x * 10; - y = y * 10; - z = z 
* 10; - - Units units = Units.MM; - - int q = -1; //need the charge sign from tracking - - double xb = 0; - double yb = 0; - - //momenta must be in GeV for the helix class - Helix helix = new Helix(x, y, z, px, py, pz, q, b, xb, yb, units); - - //Intersection points with the middle of the bar or wedge - projection.setBarIntersect(helix.getHelixPointAtR(Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgeIntersect(helix.getHelixPointAtR(Parameters.WEDGE_MIDDLE_RADIUS)); - - double rVertex = Math.sqrt(x * x + y * y); - - //Path length to the middle of the bar or wedge - projection.setBarPathLength((float) helix.getPathLength(rVertex, Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgePathLength((float) helix.getPathLength(rVertex, Parameters.WEDGE_MIDDLE_RADIUS)); - //Path length from the inner radius to the middle radius - projection.setBarInPathLength((float) helix.getPathLength(Parameters.BAR_INNER_RADIUS, Parameters.BAR_MIDDLE_RADIUS)); - projection.setWedgeInPathLength((float) helix.getPathLength(Parameters.WEDGE_INNER_RADIUS, Parameters.WEDGE_MIDDLE_RADIUS)); - - projection.setTrackID(id); - projections.add(projection); - } + if (event == null) return; + //System.out.print(" no event \n"); + + // check if there are ahdc tracks in the event + if (event.hasBank(track_bank_name) == false) return; + //System.out.print("no tracks \n"); + + DataBank bank = event.getBank(track_bank_name); + int nt = bank.rows(); // number of tracks + TrackProjection projection = new TrackProjection(); + + for (int i = 0; i < nt; i++) { + + double x = bank.getFloat("vx", i); + double y = bank.getFloat("vy", i); + double z = bank.getFloat("vz", i); + double px = bank.getFloat("px", i); + double py = bank.getFloat("py", i); + double pz = bank.getFloat("pz", i); + int id = bank.getShort("id", i); + //Put everything in MM + x = x * 10; + y = y * 10; + z = z * 10; + + Units units = Units.MM; + + int q = -1; //need the charge sign from tracking + + double xb = 0; + double yb = 0; + + 
//momenta must be in GeV for the helix class + Helix helix = new Helix(x, y, z, px, py, pz, q, b, xb, yb, units); + + //Intersection points with the middle of the bar or wedge + projection.setBarIntersect(helix.getHelixPointAtR(Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgeIntersect(helix.getHelixPointAtR(Parameters.WEDGE_MIDDLE_RADIUS)); + + double rVertex = Math.sqrt(x * x + y * y); + + //Path length to the middle of the bar or wedge + projection.setBarPathLength((float) helix.getPathLength(rVertex, Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgePathLength((float) helix.getPathLength(rVertex, Parameters.WEDGE_MIDDLE_RADIUS)); + //Path length from the inner radius to the middle radius + projection.setBarInPathLength((float) helix.getPathLength(Parameters.BAR_INNER_RADIUS, Parameters.BAR_MIDDLE_RADIUS)); + projection.setWedgeInPathLength((float) helix.getPathLength(Parameters.WEDGE_INNER_RADIUS, Parameters.WEDGE_MIDDLE_RADIUS)); + + projection.setTrackID(id); + projections.add(projection); } } diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/service/AHDCEngine.java b/reconstruction/alert/src/main/java/org/jlab/rec/service/AHDCEngine.java deleted file mode 100644 index 550450be64..0000000000 --- a/reconstruction/alert/src/main/java/org/jlab/rec/service/AHDCEngine.java +++ /dev/null @@ -1,286 +0,0 @@ -package org.jlab.service.ahdc; - -import org.jlab.clas.reco.ReconstructionEngine; -import org.jlab.clas.tracking.kalmanfilter.Material; -import org.jlab.io.base.DataBank; -import org.jlab.io.base.DataEvent; -import org.jlab.io.hipo.HipoDataSource; -import org.jlab.io.hipo.HipoDataSync; -import org.jlab.rec.ahdc.AI.*; -import org.jlab.rec.ahdc.Banks.RecoBankWriter; -import org.jlab.rec.ahdc.Cluster.Cluster; -import org.jlab.rec.ahdc.Cluster.ClusterFinder; -import org.jlab.rec.ahdc.Distance.Distance; -import org.jlab.rec.ahdc.HelixFit.HelixFitJava; -import org.jlab.rec.ahdc.Hit.Hit; -import org.jlab.rec.ahdc.Hit.HitReader; -import 
org.jlab.rec.ahdc.Hit.TrueHit; -import org.jlab.rec.ahdc.HoughTransform.HoughTransform; -import org.jlab.rec.ahdc.KalmanFilter.KalmanFilter; -import org.jlab.rec.ahdc.KalmanFilter.MaterialMap; -import org.jlab.rec.ahdc.PreCluster.PreCluster; -import org.jlab.rec.ahdc.PreCluster.PreClusterFinder; -import org.jlab.rec.ahdc.Track.Track; -import org.jlab.rec.ahdc.Mode; -import org.jlab.rec.alert.constants.CalibrationConstantsLoader; - -import java.io.File; -import java.util.*; - -public class AHDCEngine extends ReconstructionEngine { - - private boolean simulation; - private String findingMethod; - private HashMap materialMap; - private Model model; - - private Mode mode = Mode.CV_Track_Finding; - - public AHDCEngine() { - super("ALERT", "ouillon", "1.0.1"); - } - - @Override - public boolean init() { - simulation = false; - findingMethod = "distance"; - - if (materialMap == null) { - materialMap = MaterialMap.generateMaterials(); - } - - if(this.getEngineConfigString("Mode")!=null) { - if (Objects.equals(this.getEngineConfigString("Mode"), Mode.AI_Track_Finding.name())) - mode = Mode.AI_Track_Finding; - - if (Objects.equals(this.getEngineConfigString("Mode"), Mode.CV_Track_Finding.name())) - mode = Mode.CV_Track_Finding; - - } - - if (mode == Mode.AI_Track_Finding) { - model = new Model(); - } - - // Requires calibration constants - String[] alertTables = new String[] { - "/calibration/alert/ahdc/time_offsets", - "/calibration/alert/ahdc/time_to_distance", - "/calibration/alert/atof/effective_velocity", - "/calibration/alert/atof/time_walk", - "/calibration/alert/atof/attenuation", - "/calibration/alert/atof/time_offsets" - }; - requireConstants(Arrays.asList(alertTables)); - - return true; - } - - int Run = -1; - - @Override - public boolean processDataEvent(DataEvent event) { - - int runNo = 10; - int eventNo = 777; - double magfield = 50.0; - double magfieldfactor = 1; - - if (event.hasBank("RUN::config")) { - DataBank bank = event.getBank("RUN::config"); - runNo = 
bank.getInt("run", 0); - eventNo = bank.getInt("event", 0); - magfieldfactor = bank.getFloat("solenoid", 0); - if (runNo <= 0) { - System.err.println("RTPCEngine: got run <= 0 in RUN::config, skipping event."); - return false; - } - int newRun = Run; - newRun = runNo; - // Load the constants - //------------------- - if(Run!=newRun) { - CalibrationConstantsLoader.Load(newRun,"default",this.getConstantsManager()); - Run = newRun; - } - } - - magfield = 50 * magfieldfactor; - - if (event.hasBank("AHDC::adc")) { - // I) Read raw hit - HitReader hitRead = new HitReader(event, simulation); - - ArrayList AHDC_Hits = hitRead.get_AHDCHits(); - if(simulation){ - ArrayList TrueAHDC_Hits = hitRead.get_TrueAHDCHits(); - } - //System.out.println("AHDC_Hits size " + AHDC_Hits.size()); - - // II) Create PreCluster - ArrayList AHDC_PreClusters = new ArrayList<>(); - PreClusterFinder preclusterfinder = new PreClusterFinder(); - preclusterfinder.findPreCluster(AHDC_Hits); - AHDC_PreClusters = preclusterfinder.get_AHDCPreClusters(); - //System.out.println("AHDC_PreClusters size " + AHDC_PreClusters.size()); - - // III) Create Cluster - ClusterFinder clusterfinder = new ClusterFinder(); - clusterfinder.findCluster(AHDC_PreClusters); - ArrayList AHDC_Clusters = clusterfinder.get_AHDCClusters(); - //System.out.println("AHDC_Clusters size " + AHDC_Clusters.size()); - - // IV) Track Finder - ArrayList AHDC_Tracks = new ArrayList<>(); - ArrayList predictions = new ArrayList<>(); - - // If there is too much hits, we rely on to the conventional track finding - if (AHDC_Hits.size() > 300) mode = Mode.CV_Track_Finding; - - if (mode == Mode.CV_Track_Finding) { - if (findingMethod.equals("distance")) { - // IV) a) Distance method - //System.out.println("using distance"); - Distance distance = new Distance(); - distance.find_track(AHDC_Clusters); - AHDC_Tracks = distance.get_AHDCTracks(); - } else if (findingMethod.equals("hough")) { - // IV) b) Hough Transform method - 
//System.out.println("using hough"); - HoughTransform houghtransform = new HoughTransform(); - houghtransform.find_tracks(AHDC_Clusters); - AHDC_Tracks = houghtransform.get_AHDCTracks(); - } - } - if (mode == Mode.AI_Track_Finding) { - // AI --------------------------------------------------------------------------------- - AHDC_Hits.sort(new Comparator() { - @Override - public int compare(Hit a1, Hit a2) { - return Double.compare(a1.getRadius(), a2.getRadius()); - } - }); - PreClustering preClustering = new PreClustering(); - ArrayList preClustersAI = preClustering.find_preclusters_for_AI(AHDC_Hits); - ArrayList preclusterSuperlayers = preClustering.merge_preclusters(preClustersAI); - TrackConstruction trackConstruction = new TrackConstruction(); - ArrayList> tracks = new ArrayList<>(); - boolean sucess = trackConstruction.get_all_possible_track(preclusterSuperlayers, tracks); - - if (!sucess) { - System.err.println("Too much tracks candidates, exit"); - return false; - } - - try { - AIPrediction aiPrediction = new AIPrediction(); - predictions = aiPrediction.prediction(tracks, model.getModel()); - } catch (Exception e) { - throw new RuntimeException(e); - } - - for (TrackPrediction t : predictions) { - if (t.getPrediction() > 0.2) - AHDC_Tracks.add(new Track(t.getClusters())); - } - } - // ------------------------------------------------------------------------------------ - - - //Temporary track method ONLY for MC with no background; - //AHDC_Tracks.add(new Track(AHDC_Hits)); - - // V) Global fit - for (Track track : AHDC_Tracks) { - int nbOfPoints = track.get_Clusters().size(); - - double[][] szPos = new double[nbOfPoints][3]; - - int j = 0; - for (Cluster cluster : track.get_Clusters()) { - szPos[j][0] = cluster.get_X(); - szPos[j][1] = cluster.get_Y(); - szPos[j][2] = cluster.get_Z(); - j++; - } - - HelixFitJava h = new HelixFitJava(); - track.setPositionAndMomentum(h.HelixFit(nbOfPoints, szPos, 1)); - // double p = 150.0;//MeV/c - // double phi = 
Math.atan2(szPos[0][1], szPos[0][0]); - // double x_0[] = {0.0, 0.0, 0.0, p*Math.sin(phi), p*Math.cos(phi), 0.0}; - // track.setPositionAndMomentumVec(x_0); - } - - // VI) Kalman Filter - // System.out.println("AHDC_Tracks = " + AHDC_Tracks); - KalmanFilter kalmanFitter = new KalmanFilter(AHDC_Tracks, event, simulation); - // VII) Write bank - RecoBankWriter writer = new RecoBankWriter(); - - DataBank recoHitsBank = writer.fillAHDCHitsBank(event, AHDC_Hits); - DataBank recoPreClusterBank = writer.fillPreClustersBank(event, AHDC_PreClusters); - DataBank recoClusterBank = writer.fillClustersBank(event, AHDC_Clusters); - DataBank recoTracksBank = writer.fillAHDCTrackBank(event, AHDC_Tracks); - DataBank recoKFTracksBank = writer.fillAHDCKFTrackBank(event, AHDC_Tracks); - DataBank AIPredictionBanks = writer.fillAIPrediction(event, predictions); - - event.appendBank(recoHitsBank); - event.appendBank(recoPreClusterBank); - event.appendBank(recoClusterBank); - event.appendBank(recoTracksBank); - event.appendBank(recoKFTracksBank); - event.appendBank(AIPredictionBanks); - - if (simulation) { - DataBank recoMCBank = writer.fillAHDCMCTrackBank(event); - event.appendBank(recoMCBank); - } - - - } - return true; - } - - public static void main(String[] args) { - - double starttime = System.nanoTime(); - - int nEvent = 0; - int maxEvent = 10; - int myEvent = 3; - String inputFile = "merged_10.hipo"; - String outputFile = "output.hipo"; - - if (new File(outputFile).delete()) System.out.println("output.hipo is delete."); - - System.err.println(" \n[PROCESSING FILE] : " + inputFile); - - AHDCEngine en = new AHDCEngine(); - - HipoDataSource reader = new HipoDataSource(); - HipoDataSync writer = new HipoDataSync(); - - en.init(); - - reader.open(inputFile); - writer.open(outputFile); - - while (reader.hasEvent() && nEvent < maxEvent) { - nEvent++; - // if (nEvent % 100 == 0) System.out.println("nEvent = " + nEvent); - DataEvent event = reader.getNextEvent(); - - // if (nEvent != 
myEvent) continue; - // System.out.println("*********** NEXT EVENT ************"); - // event.show(); - - en.processDataEvent(event); - writer.writeEvent(event); - - } - writer.close(); - - System.out.println("finished " + (System.nanoTime() - starttime) * Math.pow(10, -9)); - } -} diff --git a/reconstruction/alert/src/main/java/org/jlab/service/ahdc/AHDCEngine.java b/reconstruction/alert/src/main/java/org/jlab/service/ahdc/AHDCEngine.java new file mode 100644 index 0000000000..e3f3bc3349 --- /dev/null +++ b/reconstruction/alert/src/main/java/org/jlab/service/ahdc/AHDCEngine.java @@ -0,0 +1,309 @@ +package org.jlab.service.ahdc; + +import org.jlab.clas.reco.ReconstructionEngine; +import org.jlab.clas.tracking.kalmanfilter.Material; +import org.jlab.io.base.DataBank; +import org.jlab.io.base.DataEvent; +import org.jlab.io.hipo.HipoDataSource; +import org.jlab.io.hipo.HipoDataSync; +import org.jlab.rec.ahdc.AI.*; +import org.jlab.rec.ahdc.Banks.RecoBankWriter; +import org.jlab.rec.ahdc.Cluster.Cluster; +import org.jlab.rec.ahdc.Cluster.ClusterFinder; +import org.jlab.rec.ahdc.Distance.Distance; +import org.jlab.rec.ahdc.HelixFit.HelixFitJava; +import org.jlab.rec.ahdc.Hit.Hit; +import org.jlab.rec.ahdc.Hit.HitReader; +import org.jlab.rec.ahdc.Hit.TrueHit; +import org.jlab.rec.ahdc.HoughTransform.HoughTransform; +import org.jlab.rec.ahdc.KalmanFilter.KalmanFilter; +import org.jlab.rec.ahdc.KalmanFilter.MaterialMap; +import org.jlab.rec.ahdc.PreCluster.PreCluster; +import org.jlab.rec.ahdc.PreCluster.PreClusterFinder; +import org.jlab.rec.ahdc.Track.Track; +import org.jlab.rec.ahdc.Mode; +import org.jlab.rec.alert.constants.CalibrationConstantsLoader; + +import java.io.File; +import java.util.*; + +/** AHDCEngine reconstruction service. + * + * AHDC Reconstruction using only AHDC information. + * + * Reconstruction utilizing other detectors (i.e. ATOF) are + * implemented in ALERTEngine. 
+ * + */ +public class AHDCEngine extends ReconstructionEngine { + + private boolean simulation; + + /** + * String name for track seedsfinding method. + * Options are: + * - "distance" + * - "hough" + * + * \todo remove bool use_AI_for_trackfinding and use option string above. + */ + private String findingMethod; + + /// Material Map used by Kalman filter + private HashMap materialMap; + + /// \todo better name... Model of what? + private Model model; + + /// \todo better name... mode for what? + private Mode mode = Mode.CV_Track_Finding; + + public AHDCEngine() { + super("ALERT", "ouillon", "1.0.1"); + } + + @Override + public boolean init() { + simulation = false; + findingMethod = "distance"; + + if (materialMap == null) { + materialMap = MaterialMap.generateMaterials(); + } + + if(this.getEngineConfigString("Mode")!=null) { + if (Objects.equals(this.getEngineConfigString("Mode"), Mode.AI_Track_Finding.name())) + mode = Mode.AI_Track_Finding; + + if (Objects.equals(this.getEngineConfigString("Mode"), Mode.CV_Track_Finding.name())) + mode = Mode.CV_Track_Finding; + } + + if (mode == Mode.AI_Track_Finding) { + model = new Model(); + } + + // Requires calibration constants + String[] alertTables = new String[] { + "/calibration/alert/ahdc/time_offsets", + "/calibration/alert/ahdc/time_to_distance", + "/calibration/alert/atof/effective_velocity", + "/calibration/alert/atof/time_walk", + "/calibration/alert/atof/attenuation", + "/calibration/alert/atof/time_offsets" + }; + requireConstants(Arrays.asList(alertTables)); + + return true; + } + + int Run = -1; + + @Override + public boolean processDataEvent(DataEvent event) { + + int runNo = 10; // needed here? + int eventNo = 777; // same + + double magfield = 50.0; // what is this? + double magfieldfactor = 1; // why is this here? 
+ + if (event.hasBank("RUN::config")) { + DataBank bank = event.getBank("RUN::config"); + runNo = bank.getInt("run", 0); + eventNo = bank.getInt("event", 0); + magfieldfactor = bank.getFloat("solenoid", 0); + if (runNo <= 0) { + System.err.println("AHDCEngine: got run <= 0 in RUN::config, skipping event."); + return false; + } + int newRun = Run; + newRun = runNo; + // Load the constants + //------------------- + if(Run!=newRun) { + CalibrationConstantsLoader.Load(newRun,"default",this.getConstantsManager()); + Run = newRun; + } + } + + /// What is this? + magfield = 50 * magfieldfactor; + + if (event.hasBank("AHDC::adc")) { + // I) Read raw hit + HitReader hitRead = new HitReader(event, simulation); + + ArrayList AHDC_Hits = hitRead.get_AHDCHits(); + if(simulation){ + ArrayList TrueAHDC_Hits = hitRead.get_TrueAHDCHits(); + } + //System.out.println("AHDC_Hits size " + AHDC_Hits.size()); + + // II) Create PreCluster + ArrayList AHDC_PreClusters = new ArrayList<>(); + PreClusterFinder preclusterfinder = new PreClusterFinder(); + preclusterfinder.findPreCluster(AHDC_Hits); + AHDC_PreClusters = preclusterfinder.get_AHDCPreClusters(); + //System.out.println("AHDC_PreClusters size " + AHDC_PreClusters.size()); + + // III) Create Cluster + ClusterFinder clusterfinder = new ClusterFinder(); + clusterfinder.findCluster(AHDC_PreClusters); + ArrayList AHDC_Clusters = clusterfinder.get_AHDCClusters(); + //System.out.println("AHDC_Clusters size " + AHDC_Clusters.size()); + + // IV) Track Finder + ArrayList AHDC_Tracks = new ArrayList<>(); + ArrayList predictions = new ArrayList<>(); + + // If there is too much hits, we rely on to the conventional track finding + if (AHDC_Hits.size() > 300) mode = Mode.CV_Track_Finding; + + if (mode == Mode.CV_Track_Finding) { + if (findingMethod.equals("distance")) { + // IV) a) Distance method + //System.out.println("using distance"); + Distance distance = new Distance(); + distance.find_track(AHDC_Clusters); + AHDC_Tracks = 
distance.get_AHDCTracks(); + } else if (findingMethod.equals("hough")) { + // IV) b) Hough Transform method + //System.out.println("using hough"); + HoughTransform houghtransform = new HoughTransform(); + houghtransform.find_tracks(AHDC_Clusters); + AHDC_Tracks = houghtransform.get_AHDCTracks(); + } + } + if (mode == Mode.AI_Track_Finding) { + // AI --------------------------------------------------------------------------------- + AHDC_Hits.sort(new Comparator() { + @Override + public int compare(Hit a1, Hit a2) { + return Double.compare(a1.getRadius(), a2.getRadius()); + } + }); + PreClustering preClustering = new PreClustering(); + ArrayList preClustersAI = preClustering.find_preclusters_for_AI(AHDC_Hits); + ArrayList preclusterSuperlayers = preClustering.merge_preclusters(preClustersAI); + TrackConstruction trackConstruction = new TrackConstruction(); + ArrayList> tracks = new ArrayList<>(); + boolean sucess = trackConstruction.get_all_possible_track(preclusterSuperlayers, tracks); + + if (!sucess) { + System.err.println("Too much tracks candidates, exit"); + return false; + } + + try { + AIPrediction aiPrediction = new AIPrediction(); + predictions = aiPrediction.prediction(tracks, model.getModel()); + } catch (Exception e) { + throw new RuntimeException(e); + } + + for (TrackPrediction t : predictions) { + if (t.getPrediction() > 0.2) + AHDC_Tracks.add(new Track(t.getClusters())); + } + } + // ------------------------------------------------------------------------------------ + + + //Temporary track method ONLY for MC with no background; + //AHDC_Tracks.add(new Track(AHDC_Hits)); + + // V) Global fit + for (Track track : AHDC_Tracks) { + int nbOfPoints = track.get_Clusters().size(); + + double[][] szPos = new double[nbOfPoints][3]; + + int j = 0; + for (Cluster cluster : track.get_Clusters()) { + szPos[j][0] = cluster.get_X(); + szPos[j][1] = cluster.get_Y(); + szPos[j][2] = cluster.get_Z(); + j++; + } + + HelixFitJava h = new HelixFitJava(); + 
track.setPositionAndMomentum(h.HelixFit(nbOfPoints, szPos, 1)); + // double p = 150.0;//MeV/c + // double phi = Math.atan2(szPos[0][1], szPos[0][0]); + // double x_0[] = {0.0, 0.0, 0.0, p*Math.sin(phi), + // p*Math.cos(phi), 0.0}; track.setPositionAndMomentumVec(x_0); + } + + // VI) Kalman Filter + // System.out.println("AHDC_Tracks = " + AHDC_Tracks); + KalmanFilter kalmanFitter = new KalmanFilter(AHDC_Tracks, event, simulation); + // VII) Write bank + RecoBankWriter writer = new RecoBankWriter(); + + DataBank recoHitsBank = writer.fillAHDCHitsBank(event, AHDC_Hits); + DataBank recoPreClusterBank = writer.fillPreClustersBank(event, AHDC_PreClusters); + DataBank recoClusterBank = writer.fillClustersBank(event, AHDC_Clusters); + DataBank recoTracksBank = writer.fillAHDCTrackBank(event, AHDC_Tracks); + DataBank recoKFTracksBank = writer.fillAHDCKFTrackBank(event, AHDC_Tracks); + DataBank AIPredictionBanks = writer.fillAIPrediction(event, predictions); + + event.appendBank(recoHitsBank); + event.appendBank(recoPreClusterBank); + event.appendBank(recoClusterBank); + event.appendBank(recoTracksBank); + event.appendBank(recoKFTracksBank); + event.appendBank(AIPredictionBanks); + + if (simulation) { + DataBank recoMCBank = writer.fillAHDCMCTrackBank(event); + event.appendBank(recoMCBank); + } + + + } + return true; + } + + public static void main(String[] args) { + + double starttime = System.nanoTime(); + + int nEvent = 0; + int maxEvent = 10; + int myEvent = 3; + String inputFile = "merged_10.hipo"; + String outputFile = "output.hipo"; + + if (new File(outputFile).delete()) System.out.println("output.hipo is delete."); + + System.err.println(" \n[PROCESSING FILE] : " + inputFile); + + AHDCEngine en = new AHDCEngine(); + + HipoDataSource reader = new HipoDataSource(); + HipoDataSync writer = new HipoDataSync(); + + en.init(); + + reader.open(inputFile); + writer.open(outputFile); + + while (reader.hasEvent() && nEvent < maxEvent) { + nEvent++; + // if (nEvent % 100 == 0) 
System.out.println("nEvent = " + nEvent); + DataEvent event = reader.getNextEvent(); + + // if (nEvent != myEvent) continue; + // System.out.println("*********** NEXT EVENT ************"); + // event.show(); + + en.processDataEvent(event); + writer.writeEvent(event); + + } + writer.close(); + + System.out.println("finished " + (System.nanoTime() - starttime) * Math.pow(10, -9)); + } +} diff --git a/reconstruction/alert/src/main/java/org/jlab/service/alert/ALERTEngine.java b/reconstruction/alert/src/main/java/org/jlab/service/alert/ALERTEngine.java new file mode 100644 index 0000000000..b6e6851ec8 --- /dev/null +++ b/reconstruction/alert/src/main/java/org/jlab/service/alert/ALERTEngine.java @@ -0,0 +1,177 @@ +package org.jlab.service.alert; + +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; +import java.io.File; +import java.util.*; + +import org.jlab.io.base.DataBank; +import org.jlab.io.base.DataEvent; +import org.jlab.io.hipo.HipoDataSource; +import org.jlab.io.hipo.HipoDataSync; + +import org.jlab.clas.reco.ReconstructionEngine; +import org.jlab.clas.tracking.kalmanfilter.Material; +import org.jlab.clas.swimtools.Swim; + +import org.jlab.rec.alert.banks.RecoBankWriter; +import org.jlab.rec.alert.projections.TrackProjector; + + +/** + *

ALERTEngine reconstruction service.

+ * + * @author Whit Armstrong + * @author Noemie Pilleux + * @since 2025-04-03 + */ +public class ALERTEngine extends ReconstructionEngine { + + /** + * ALERT Engine output bank writer. + * + * @see RecoBankWriter + * + *

Output banks

+ *
    + *
  • Track projection — {@link TrackProjector}
  • + *
+ * + */ + private RecoBankWriter rbc; + + /** + * Current run number being processed. + * TODO: why atomic here and nowhere else? + */ + private final AtomicInteger run = new AtomicInteger(0); + + private double b; //Magnetic field + + public void setB(double B) { + this.b = B; + } + public double getB() { + return b; + } + + /** + * ALERTEngine service c'tor. + */ + public ALERTEngine() { + super("ALERT", "whit,ouillon,pilleux", "0.1"); + } + + /** + * ALERTEngine initialization. + * Creates the RecoBankWriter and checks for various yaml flags. + * TODO: document flags + */ + @Override + public boolean init() { + + rbc = new RecoBankWriter(); + + + if(this.getEngineConfigString("Mode")!=null) { + //if (Objects.equals(this.getEngineConfigString("Mode"), Mode.AI_Track_Finding.name())) + // mode = Mode.AI_Track_Finding; + } + return true; + } + + /** + * Process Event. + * Main method called to process event data. + * + *
    + *
  • Check for AHDC and ATOF banks
  • + *
  • Project track to ATOF
  • + *
+ */ + @Override + public boolean processDataEvent(DataEvent event) { + + if (!event.hasBank("AHDC::adc")) + return false; + if (!event.hasBank("ATOF::tdc")) + return false; + + if (!event.hasBank("RUN::config")) { + return true; + } + + DataBank bank = event.getBank("RUN::config"); + + int newRun = bank.getInt("run", 0); + if (newRun == 0) { + return true; + } + + if (run.get() == 0 || (run.get() != 0 && run.get() != newRun)) { + run.set(newRun); + } + + //Do we need to read the event vx,vy,vz? + //If not, this part can be moved in the initialization of the engine. + double eventVx=0,eventVy=0,eventVz=0; //They should be in CM + //Track Projector Initialisation with b field + Swim swim = new Swim(); + float magField[] = new float[3]; + swim.BfieldLab(eventVx, eventVy, eventVz, magField); + this.b = Math.sqrt(Math.pow(magField[0],2) + Math.pow(magField[1],2) + Math.pow(magField[2],2)); + + TrackProjector projector = new TrackProjector(); + projector.setB(this.b); + projector.projectTracks(event); + rbc.appendMatchBanks(event, projector.getProjections()); + + return true; + } + + /** + * ALERTEngine main. + * TODO: needs good test. 
+ */ + public static void main(String[] args) { + + double starttime = System.nanoTime(); + + int nEvent = 0; + int maxEvent = 1000; + int myEvent = 3; + String inputFile = "alert_out_update.hipo"; + String outputFile = "output.hipo"; + + if (new File(outputFile).delete()) System.out.println("output.hipo is delete."); + + System.err.println(" \n[PROCESSING FILE] : " + inputFile); + + ALERTEngine en = new ALERTEngine(); + + HipoDataSource reader = new HipoDataSource(); + HipoDataSync writer = new HipoDataSync(); + + en.init(); + + reader.open(inputFile); + writer.open(outputFile); + + while (reader.hasEvent() && nEvent < maxEvent) { + nEvent++; + // if (nEvent % 100 == 0) System.out.println("nEvent = " + nEvent); + DataEvent event = reader.getNextEvent(); + + // if (nEvent != myEvent) continue; + // System.out.println("*********** NEXT EVENT ************"); + // event.show(); + + en.processDataEvent(event); + writer.writeEvent(event); + + } + writer.close(); + + System.out.println("finished " + (System.nanoTime() - starttime) * Math.pow(10, -9)); + } +} diff --git a/reconstruction/alert/src/main/java/org/jlab/rec/service/ATOFEngine.java b/reconstruction/alert/src/main/java/org/jlab/service/atof/ATOFEngine.java similarity index 77% rename from reconstruction/alert/src/main/java/org/jlab/rec/service/ATOFEngine.java rename to reconstruction/alert/src/main/java/org/jlab/service/atof/ATOFEngine.java index 19f27e5aa8..c409355a04 100644 --- a/reconstruction/alert/src/main/java/org/jlab/rec/service/ATOFEngine.java +++ b/reconstruction/alert/src/main/java/org/jlab/service/atof/ATOFEngine.java @@ -18,7 +18,7 @@ import org.jlab.rec.atof.hit.ATOFHit; import org.jlab.rec.atof.hit.BarHit; import org.jlab.rec.atof.hit.HitFinder; -import org.jlab.rec.alert.projections.TrackProjector; +//import org.jlab.rec.alert.projections.TrackProjector; /** * Service to return reconstructed ATOF hits and clusters @@ -69,18 +69,22 @@ public boolean processDataEvent(DataEvent event) { 
run.set(newRun); } - //Do we need to read the event vx,vy,vz? - //If not, this part can be moved in the initialization of the engine. - double eventVx=0,eventVy=0,eventVz=0; //They should be in CM - //Track Projector Initialisation with b field - Swim swim = new Swim(); - float magField[] = new float[3]; - swim.BfieldLab(eventVx, eventVy, eventVz, magField); - this.b = Math.sqrt(Math.pow(magField[0],2) + Math.pow(magField[1],2) + Math.pow(magField[2],2)); - TrackProjector projector = new TrackProjector(); - projector.setB(this.b); - projector.projectTracks(event); - rbc.appendMatchBanks(event, projector.getProjections()); + ////Do we need to read the event vx,vy,vz? + ////If not, this part can be moved in the initialization of the engine. + //double eventVx=0,eventVy=0,eventVz=0; //They should be in CM + ////Track Projector Initialisation with b field + //Swim swim = new Swim(); + //float magField[] = new float[3]; + //swim.BfieldLab(eventVx, eventVy, eventVz, magField); + //this.b = Math.sqrt(Math.pow(magField[0],2) + Math.pow(magField[1],2) + Math.pow(magField[2],2)); + + ///// \todo move this to ALERTEngine + //TrackProjector projector = new TrackProjector(); + //projector.setB(this.b); + //projector.projectTracks(event); + //rbc.appendMatchBanks(event, projector.getProjections()); + + // Why do we have to "find" hits? //Hit finder init HitFinder hitfinder = new HitFinder(); hitfinder.findHits(event, ATOF);