Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
88a37c8
add convenience method
baltzell Apr 22, 2026
81a942f
remove unused stuff
baltzell Apr 22, 2026
5b04c7f
add decoding engine
baltzell Apr 22, 2026
e007bff
add it to the clara yaml
baltzell Apr 22, 2026
4b4f4ed
use a pool
baltzell Apr 23, 2026
7533d1b
hmm
baltzell Apr 23, 2026
d2effb6
allow decoder instances to share ConstantsManagers
baltzell Apr 23, 2026
813df72
share ConstantsManagers
baltzell Apr 23, 2026
b67e68e
kludge test
baltzell Apr 23, 2026
a27e855
cleanup
baltzell Apr 23, 2026
b11aae4
try this
baltzell Apr 23, 2026
1ac9d03
inherit ConstantsManagers
baltzell Apr 23, 2026
3f60c48
cleanup
baltzell Apr 23, 2026
d788f4a
only check tables if not shared
baltzell Apr 23, 2026
7ce4da4
higher ports on macos, cleanup process dpe process
baltzell Apr 23, 2026
5274c3d
remove ineffective pid trap
baltzell Apr 23, 2026
ba52c75
Revert "higher ports on macos, cleanup process dpe process"
baltzell Apr 24, 2026
269ff08
Revert "fix job name"
baltzell Apr 24, 2026
030152d
Revert "decouple ubuntu/macos builds to reduce wait"
baltzell Apr 24, 2026
2e3fefe
Revert "remove unnecessary reader class"
baltzell Apr 24, 2026
676df4d
Revert "remove example engine"
baltzell Apr 24, 2026
8986cf4
restore reported data type
baltzell Apr 25, 2026
5a693e3
rename class
baltzell Apr 27, 2026
756087d
undo
baltzell Apr 27, 2026
4036a67
fix rebase oops
baltzell Apr 27, 2026
ac20ac1
rename class
baltzell Apr 27, 2026
d944fa4
fix rebase oops
baltzell Apr 27, 2026
5a1ff60
cleanup
baltzell Apr 27, 2026
7b33a15
cleanup
baltzell Apr 27, 2026
313b4be
stf
baltzell Apr 27, 2026
4b3b4e6
just events, not per thread
baltzell Apr 28, 2026
84d0662
reduce constants sharing
baltzell Apr 28, 2026
da0aa0d
bugfix
baltzell Apr 28, 2026
159eb5e
share one RCDBManager
baltzell Apr 28, 2026
0ed9fa6
bugfix
baltzell Apr 28, 2026
8c6cb24
restore
baltzell May 9, 2026
2c95583
DecoderEngine: just pass along HIPO events
baltzell May 11, 2026
24bc494
add Clas12Reader, no decoding
baltzell May 11, 2026
fa2facf
dummy initialization value
baltzell May 11, 2026
b9ca5aa
bugfix
baltzell May 11, 2026
b768d23
split I/O service into separate PR
baltzell May 12, 2026
c29d9ba
split pull requests
baltzell May 12, 2026
40630b0
cleanup, avoid class variable
baltzell May 12, 2026
71d85f6
remove debugging leftover
baltzell May 12, 2026
b5d51dc
revert to hard-coded byte order
baltzell May 13, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public int readEventCount() throws EventReaderException {

@Override
public ByteOrder readByteOrder() throws EventReaderException {
    // The byte order is deliberately hard-coded (see commit "revert to
    // hard-coded byte order"); the reader-reported order is kept commented
    // for reference. NOTE(review): assumes all input files are
    // little-endian -- confirm against the producers of these files.
    return ByteOrder.LITTLE_ENDIAN; //reader.getFileByteOrder();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@

import org.jlab.io.base.DataEvent;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.io.hipo.HipoDataSync;

import org.jlab.jnp.hipo4.data.Bank;
import org.jlab.jnp.hipo4.data.Event;
Expand All @@ -38,27 +36,28 @@ public class CLASDecoder {
protected SchemaFactory schemaFactory = new SchemaFactory();
private CodaEventDecoder codaDecoder = null;
private List<DetectorDataDgtz> dataList = new ArrayList<>();
private HipoDataSync writer = null;
private HipoDataEvent hipoEvent = null;
private boolean isRunNumberFixed = false;
private int decoderDebugMode = 0;
private ModeAHDC ahdcExtractor = new ModeAHDC();
private RCDBManager rcdbManager = new RCDBManager();
private static RCDBManager rcdbManager = new RCDBManager();

public CLASDecoder(boolean development){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(development);
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

public CLASDecoder(){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder();
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

/**
 * Sharing constructor: builds a new decoder that reuses the constants
 * managers of an existing decoder (via the DetectorEventDecoder copy
 * constructor) instead of reloading them, while still owning its own CODA
 * event decoder and its own schema factory loaded from CLAS12DIR.
 *
 * @param d an already-initialized decoder whose constants managers are shared
 */
public CLASDecoder(CLASDecoder d) {
    this.codaDecoder = new CodaEventDecoder();
    this.detectorDecoder = new DetectorEventDecoder(d.detectorDecoder);
    this.schemaFactory.initFromDirectory(
            ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4"));
}
Expand Down Expand Up @@ -792,4 +791,8 @@ public Event getDecodedEvent(EvioDataEvent rawEvent, int run, int counter, Doubl

return decodedEvent;
}

/**
 * Convenience overload that decodes a raw EVIO event with placeholder
 * arguments: run and counter of -1 and nulls for the last two parameters
 * (presumably field-scale overrides -- TODO confirm against the full
 * overload's documentation).
 *
 * @param rawEvent the raw EVIO event to decode
 * @return the decoded HIPO event
 */
public Event getDecodedEvent(EvioDataEvent rawEvent) {
    final int noRun = -1;
    final int noCounter = -1;
    return this.getDecodedEvent(rawEvent, noRun, noCounter, null, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,17 @@ public DetectorEventDecoder(boolean development){
}
}

/**
 * Default constructor: performs the full initialization, which populates the
 * key/table lists, initializes the constants managers, and checks the tables.
 */
public DetectorEventDecoder() {
    initDecoder();
}

/**
 * Sharing constructor: reuses the translation, fitter, and scaler constants
 * managers of an existing decoder (so their tables are not reloaded), then
 * runs only the lightweight part of initialization -- the key/table lists --
 * skipping manager initialization and the table check.
 *
 * @param d a fully-initialized decoder whose managers are shared
 */
public DetectorEventDecoder(DetectorEventDecoder d) {
    this.translationManager = d.translationManager;
    this.fitterManager = d.fitterManager;
    this.scalerManager = d.scalerManager;
    this.initDecoder(false);
}

public void setTimestamp(String timestamp) {
translationManager.setTimeStamp(timestamp);
fitterManager.setTimeStamp(timestamp);
Expand Down Expand Up @@ -80,10 +91,6 @@ public float getRcdbSolenoidScale() {
getValue()).floatValue();
}

public DetectorEventDecoder(){
this.initDecoder();
}

public final void initDecoderDev(){
keysTrans = Arrays.asList(new DetectorType[]{ DetectorType.HTCC,DetectorType.BST,DetectorType.RTPC} );
tablesTrans = Arrays.asList(new String[]{ "/daq/tt/clasdev/htcc","/daq/tt/clasdev/svt","/daq/tt/clasdev/rtpc" });
Expand All @@ -95,7 +102,11 @@ public final void initDecoderDev(){
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
}

public final void initDecoder(){
/**
 * Full initialization: delegates to {@code initDecoder(true)}, which fills
 * the key/table lists, initializes the constants managers, and checks the
 * tables.
 */
public final void initDecoder() {
    this.initDecoder(true);
}

public final void initDecoder(boolean initializeManagers){

// Detector translation table
keysTrans = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.LTCC,DetectorType.ECAL,DetectorType.FTOF,
Expand All @@ -109,7 +120,6 @@ public final void initDecoder(){
"/daq/tt/rf","/daq/tt/bmt","/daq/tt/fmt","/daq/tt/rich2","/daq/tt/hel","/daq/tt/band","/daq/tt/rtpc",
"/daq/tt/raster","/daq/tt/atof","/daq/tt/ahdc"
});
translationManager.init(tablesTrans);

// ADC waveform fitter translation table
keysFitter = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.FTOF,DetectorType.LTCC,
Expand All @@ -122,17 +132,20 @@ public final void initDecoder(){
"/daq/config/fmt","/daq/fadc/hel","/daq/fadc/rf","/daq/fadc/band","/daq/fadc/raster",
"/daq/config/ahdc"
});
fitterManager.init(tablesFitter);

// Data filter list
keysFilter = Arrays.asList(new DetectorType[]{DetectorType.DC});

scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));


keysMicromega = Arrays.asList(new DetectorType[]{DetectorType.BMT,DetectorType.FMT,DetectorType.FTTRK});

checkTables();
if (initializeManagers) {
translationManager.init(tablesTrans);
fitterManager.init(tablesFitter);
scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
checkTables();
}

}

public void checkTables() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
package org.jlab.clas.reco;

import java.util.Set;
import java.util.HashSet;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.engine.Engine;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.detector.decode.CLASDecoder;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.jnp.hipo4.data.SchemaFactory;
import org.json.JSONObject;

/**
*
* @author baltzell
*/
public class DecoderEngine implements Engine {

static final int POOL_SIZE = 64;
static final Set<EngineDataType> ED_TYPES = ClaraUtil.buildDataTypes(
Clas12Types.EVIO,Clas12Types.HIPO,EngineDataType.JSON,EngineDataType.STRING);

SchemaFactory schema;
BlockingQueue<CLASDecoder> pool;
int constantsShared = 16;

public DecoderEngine() {
schema = new SchemaFactory();
schema.initFromDirectory(System.getenv("CLAS12DIR") + "/etc/bankdefs/hipo4");
Comment thread
baltzell marked this conversation as resolved.
}

@Override
public Set<EngineDataType> getInputDataTypes() { return ED_TYPES; }
@Override
public Set<EngineDataType> getOutputDataTypes() { return ED_TYPES; }
@Override
public EngineData executeGroup(Set<EngineData> set) { return null; }
@Override
public Set<String> getStates() { return new HashSet<>(); }
@Override
public String getDescription() { return "decoder engine"; }
@Override
public String getVersion() { return "1.0"; }
@Override
public String getAuthor() { return "baltzell"; }
@Override
public void reset() {}
@Override
public void destroy() {}

@Override
public EngineData configure(EngineData ed) {
JSONObject json = new JSONObject(ed.getData());
pool = new ArrayBlockingQueue<>(POOL_SIZE);
CLASDecoder d0 = null;
for (int i=0; i<POOL_SIZE; i++) {
CLASDecoder d;
if (i % constantsShared == 0) {
d0 = new CLASDecoder();
if (json.has("variation")) d0.setVariation(json.getString("variation"));
if (json.has("timestamp")) d0.setVariation(json.getString("timestamp"));
d = d0;
}
else {
d = new CLASDecoder(d0);
}
pool.add(d);
}
return ed;
}

@Override
public EngineData execute(EngineData input) {

EngineData output = input;

// if it's EVIO, decode it, otherwise just pass it along
if (input.getMimeType().equals("binary/data-evio")) {
EvioDataEvent evio;
try {
ByteBuffer bb = (ByteBuffer) input.getData();
//evio = new EvioDataEvent(bb.array(), bb.order());
evio = new EvioDataEvent(bb.array(), ByteOrder.LITTLE_ENDIAN);
} catch (Exception e) {
String msg = String.format("Error reading input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
HipoDataEvent hipo;
try {
CLASDecoder d = pool.take();
hipo = new HipoDataEvent(d.getDecodedEvent(evio),schema);
pool.put(d);
output.setData("binary/data-hipo", hipo.getHipoEvent());
} catch (Exception e) {
String msg = String.format("Error processing input event%n%n%s", ClaraUtil.reportException(e));
output.setStatus(EngineStatus.ERROR);
output.setDescription(msg);
return output;
}
}

return output;
}
}
6 changes: 4 additions & 2 deletions etc/services/rgd-clarode.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,14 @@ configuration:
outputBankPrefix: "HB"
io-services:
reader:
class: org.jlab.io.clara.DecoderReader
name: DecoderReader
class: org.jlab.io.clara.EvioToEvioReader
name: EvioToEvioReader
writer:
class: org.jlab.io.clara.DecoderWriter
name: DecoderWriter
services:
- class: org.jlab.clas.reco.DecoderEngine
name: DECO
- class: org.jlab.service.ai.DCDenoiseEngine
name: DCDN
- class: org.jlab.clas.swimtools.MagFieldsEngine
Expand Down