Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
package org.jlab.analysis.eventmerger;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jlab.jnp.hipo4.data.*;
import org.jlab.jnp.hipo4.io.HipoReader;
import org.jlab.jnp.utils.data.TextHistogram;

/**
* Hipo Reduce Worker: filter event based on bank size
*
* Inputs: bank name and number of rows
* Returns "true" if the size of the selected bank is greater than the given
* value or the bank name is an empty string, or "false" otherwise
* @author devita
*/
public class FilterBankSize implements Worker {

    private Bank filterBank = null;        // bank whose row count is tested; null disables the filter
    private String bankName = null;        // name of the bank to filter on ("" disables the filter)
    private int nRows = -1;                // events pass when the bank has MORE than this many rows
    private int[] rowBuffer = new int[21]; // row-count histogram: 20 regular bins + 1 overflow bin
    private int rowMax = 500;              // upper edge of the regular histogram range

    /**
     * Create a filter selecting events where the chosen bank has more than
     * the given number of rows.
     *
     * @param bankName name of the bank to check (empty string disables the filter)
     * @param nRows    minimum row count (events pass when rows &gt; nRows)
     */
    public FilterBankSize(String bankName,int nRows){
        this.bankName = bankName;
        this.nRows = nRows;
        System.out.println("\nInitializing bank size reduction: bank set to " + this.bankName + " with minimum rows set to " + this.nRows + "\n");
    }

    /**
     * Initialize the bank from the reader's schema factory; if the bank name
     * is empty the filter stays disabled and all events pass.
     *
     * @param reader reader providing the schema of the selected bank
     */
    @Override
    public void init(HipoReader reader) {
        if(!bankName.isEmpty())
            filterBank = new Bank(reader.getSchemaFactory().getSchema(bankName));
    }

    /**
     * Event filter: select events according to the size of the chosen bank,
     * also accumulating the row-count histogram for statistics.
     *
     * @param event event to be tested
     * @return true if the bank has more than nRows rows (or the filter is disabled)
     */
    @Override
    public boolean processEvent(Event event) {

        if(filterBank==null) return true;

        event.read(filterBank);
        int rows = filterBank.getRows();
        double value = (double) rows;

        // fill statistics array; value == rowMax also lands in the overflow
        // slot via the else branch (bin evaluates to rowBins)
        int rowBins = rowBuffer.length-1;
        if(value>rowMax){
            rowBuffer[rowBins] = rowBuffer[rowBins] + 1;
        } else{
            int bin = (int) (rowBins*value/(rowMax));
            rowBuffer[bin] = rowBuffer[bin] + 1;
        }

        return rows>this.nRows;
    }

    // This function has to be implemented, but not used if
    // HipoStream is not trying to classify the events.
    @Override
    public long clasifyEvent(Event event) { return 0L; }

    /**
     * Get map of bank-size histogram entries, keyed by the formatted bin
     * range (plus an "overflow" entry), in ascending bin order.
     *
     * @return insertion-ordered map of bin label to event count
     */
    public Map<String,Double> getBankSizeMap(){
        Map<String,Double> sizeMap = new LinkedHashMap<>();
        int rowBins = rowBuffer.length-1;
        double step = ((double) rowMax)/rowBins;
        for(int i = 0; i < rowBins; i++){
            String key = String.format("[%6.1f -%6.1f]", (i*step),(i+1)*step);
            sizeMap.put(key, (double) rowBuffer[i]);
        }
        sizeMap.put("overflow", (double) rowBuffer[rowBins] );
        return sizeMap;
    }

    /**
     * Show bank size histogram (entries are events); no-op when the filter
     * is disabled.
     */
    public void showStats() {
        if(filterBank==null) return;
        System.out.println("\n\n");
        System.out.println(bankName.toUpperCase() + " BANK SIZE HISTOGRAM (ENTRIES ARE EVENTS)\n");
        TextHistogram histo = new TextHistogram();
        Map<String,Double> sizeMap = this.getBankSizeMap();
        histo.setPrecision(0);
        histo.setMinDecriptorWidth(28);
        histo.setWidth(80);
        histo.setData(sizeMap);
        histo.print();
    }
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.jlab.analysis.eventmerger;
import java.util.LinkedHashMap;
import java.util.Map;
import org.jlab.detector.epics.EpicsSequence;
import org.jlab.detector.scalers.DaqScalersSequence;
import org.jlab.jnp.hipo4.data.*;
import org.jlab.jnp.hipo4.io.HipoReader;
Expand All @@ -17,18 +18,33 @@

public class FilterFcup implements Worker {

Bank runConfigBank = null;
DaqScalersSequence chargeSeq = null;
private double charge = -1;
private double current = -1;
private int[] currentBuffer = new int[21];
private int currentMax = 80;
public final static String FCUP_SCALER = "DSC2";
private Bank runConfigBank = null;
private DaqScalersSequence scalerSeq = null;
private EpicsSequence epicsSeq = null;
private double currentMin = -1;
private double currentMax = 80;
private String source = null;
private int[] histoBuffer = new int[21];
private int histoMax = 80;

public FilterFcup(double current){
this.current=current;
System.out.println("\nInitializing Faraday Cup reduction: threshold current set to " + this.current + "\n");
public FilterFcup(double min, double max, String source){
this.currentMin=min;
this.currentMax=max;
if(currentMax < Double.POSITIVE_INFINITY)
this.histoMax=(int) (2*max);
this.source=source;
System.out.print("\nInitializing Faraday Cup reduction: current range set to " + this.currentMin + " - " + this.currentMax);
System.out.print("\n current source set to " + (this.source.equals(FCUP_SCALER) ? source : "RAW:epics."+source) + "\n");
}

public FilterFcup(double min, double max){
this(min, max, FCUP_SCALER);
}

public FilterFcup(double min){
this(min, Double.MAX_VALUE, FCUP_SCALER);
}


/**
* Initialize bank schema
Expand All @@ -46,7 +62,16 @@ public void init(HipoReader reader) {
* @param sequence
*/
public void setScalerSequence(DaqScalersSequence sequence) {
this.chargeSeq=sequence;
this.scalerSeq=sequence;
}

/**
* Set sequence of Epics readings
*
* @param sequence
*/
public void setEpicsSequence(EpicsSequence sequence) {
this.epicsSeq=sequence;
}

/**
Expand All @@ -61,23 +86,30 @@ public boolean processEvent(Event event) {

if(runConfigBank.getRows()>0){
long timeStamp = runConfigBank.getLong("timestamp",0);
int unixTime = runConfigBank.getInt("unixtime",0);

// get beam current
double value=chargeSeq.getInterval(timeStamp).getBeamCurrent();
double value=0;
if(source.equals(FCUP_SCALER))
value = scalerSeq.getInterval(timeStamp).getBeamCurrent();
else {
if(epicsSeq.get(unixTime)!=null)
value = epicsSeq.getMinimum(source, 0, unixTime);
}

// fill statistics array
int currentBins = currentBuffer.length-1;
if(value>currentMax){
currentBuffer[currentBins] = currentBuffer[currentBins] + 1;
int currentBins = histoBuffer.length-1;
if(value>histoMax){
histoBuffer[currentBins] = histoBuffer[currentBins] + 1;
} else if(value<0){
currentBuffer[0] = currentBuffer[0];
histoBuffer[0] = histoBuffer[0];
} else{
int bin = (int) (currentBins*value/(currentMax));
currentBuffer[bin] = currentBuffer[bin] + 1;
int bin = (int) (currentBins*value/(histoMax));
histoBuffer[bin] = histoBuffer[bin] + 1;
}

// set filter value
if(value>current) return true;
if(value>currentMin && value<currentMax) return true;
}
return false;
}
Expand All @@ -92,14 +124,14 @@ public boolean processEvent(Event event) {
* @return
*/
public Map<String,Double> getCurrentMap(){
Map<String,Double> sizeMap = new LinkedHashMap<String,Double>();
int currentBins = currentBuffer.length-1;
double step = ((double) currentMax)/currentBins;
Map<String,Double> sizeMap = new LinkedHashMap<>();
int currentBins = histoBuffer.length-1;
double step = ((double) histoMax)/currentBins;
for(int i = 0; i < currentBins; i++){
String key = String.format("[%6.1f -%6.1f]", (i*step),(i+1)*step);
sizeMap.put(key, (double) currentBuffer[i]);
sizeMap.put(key, (double) histoBuffer[i]);
}
sizeMap.put("overflow", (double) currentBuffer[currentBins] );
sizeMap.put("overflow", (double) histoBuffer[currentBins] );
return sizeMap;
}

Expand All @@ -108,7 +140,7 @@ public Map<String,Double> getCurrentMap(){
*/
public void showStats() {
System.out.println("\n\n");
System.out.println(" BEAM CURRENT HISTOGRAM (ENTRIES ARE EVENTS)\n");
System.out.println(" BEAM CURRENT HISTOGRAM BEFORE FILTER (ENTRIES ARE EVENTS)\n");
TextHistogram histo = new TextHistogram();
Map<String,Double> sizeMap = this.getCurrentMap();
histo.setPrecision(0);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,25 @@

/**
* Hipo Reduce Worker: filter event based on trigger bit
*
* Inputs: selected trigger bit (0-63)
* Returns "true" is selected bit is set in the trigger bit word and no other bits are set

Inputs: selected and vetoed trigger bit masks (64 bits each)
Returns "true" if one of the bits in the selectedBits mask is set in the trigger
word and none of the bits in the vetoedBits mask is set
* @author devita
*/
public class FilterTrigger implements Worker {

Bank triggerBank = null;
DaqScalersSequence chargeSeq = null;
int bit = -1;
long selectedBits = 0L;
long vetoedBits = 0L;

public FilterTrigger(int bit){
this.bit=bit;
System.out.println("\nInitializing trigger reduction: bit set to " + this.bit + "\n");
public FilterTrigger(long bits, long veto){
this.selectedBits=bits;
this.vetoedBits=veto;
System.out.println("\nInitializing trigger reduction:");
System.out.println("\t selected bit mask set to 0x" + Long.toHexString(bits));
System.out.println("\t vetoed bit mask set to 0x" + Long.toHexString(veto));
}

/**
Expand All @@ -43,13 +48,8 @@ public boolean processEvent(Event event) {
event.read(triggerBank);
if(triggerBank.getRows()>0){
long triggerBit = triggerBank.getLong("trigger",0);
long timeStamp = triggerBank.getLong("timestamp",0);
// Value will be >0 if selected bit is 1 in triggerBit
int value = DataByteUtils.readLong(triggerBit, bit, bit);
// Check that no other bit is set
long thisBit = value*((long) Math.pow(2, bit));
// If true is returned, the event will be written to the output
if(value>0 && thisBit==triggerBit) return true;
if((triggerBit & selectedBits) !=0L && (triggerBit & vetoedBits) == 0L) return true;
}
return false;
}
Expand Down
Loading