import json
import logging
import time
import uuid
from typing import Optional

import numpy as np
from confluent_kafka import Producer
from streaming_data_types import serialise_6s4t, serialise_ev44, serialise_pl72
from streaming_data_types.run_start_pl72 import DetectorSpectrumMap
710
811logger = logging .getLogger ("saluki" )
912
1013RNG = np .random .default_rng ()
1114
1215
13- def generate_fake_ev44 (
16+ def generate_fake_events (
1417 msg_id : int ,
1518 events_per_frame : int ,
1619 tof_peak : float ,
1720 tof_sigma : float ,
1821 det_min : int ,
1922 det_max : int ,
2023) -> bytes :
21- detector_ids = np . random . randint (low = det_min , high = det_max , size = events_per_frame )
24+ detector_ids = RNG . integers (low = det_min , high = det_max , size = events_per_frame )
2225 tofs = np .maximum (0.0 , RNG .normal (loc = tof_peak , scale = tof_sigma , size = events_per_frame ))
2326
2427 return serialise_ev44 (
@@ -31,50 +34,98 @@ def generate_fake_ev44(
3134 )
3235
3336
def generate_run_start(det_max: int, job_id: Optional[str] = None) -> bytes:
    """
    Serialise a pl72 run-start message for the fake instrument.

    :param det_max: Number of detectors; a trivial 1:1 detector-to-spectrum
        map is built for detector ids [0, det_max).
    :param job_id: Job identifier to embed in the message. When omitted a
        fresh random UUID is generated (previous behaviour); pass an explicit
        id so the matching run-stop can carry the same job_id.
    :return: Flatbuffer-serialised pl72 payload.
    """
    # Identity mapping: detector i -> spectrum i.
    det_spec_map = DetectorSpectrumMap(
        detector_ids=np.arange(0, det_max, dtype=np.int32),
        spectrum_numbers=np.arange(0, det_max, dtype=np.int32),
        n_spectra=det_max,
    )
    return serialise_pl72(
        start_time=int(time.time() * 1000),  # milliseconds since epoch
        stop_time=None,  # open-ended run; stopped by a later 6s4t message
        run_name=f"saluki-howl-{uuid.uuid4()}",
        instrument_name="saluki-howl",
        nexus_structure=json.dumps({}),  # empty NeXus structure placeholder
        job_id=job_id if job_id is not None else str(uuid.uuid4()),
        filename=str(uuid.uuid4()),
        detector_spectrum_map=det_spec_map,
    )
53+
54+
def generate_run_stop(job_id: Optional[str] = None) -> bytes:
    """
    Serialise a 6s4t run-stop message stamped with the current wall-clock time.

    :param job_id: Job identifier to embed in the message. When omitted a
        fresh random UUID is generated (previous behaviour) — note that such
        a stop can never be correlated with its run-start by job_id, so
        callers that need correlation should pass the start's job_id.
    :return: Flatbuffer-serialised 6s4t payload.
    """
    return serialise_6s4t(
        stop_time=int(time.time() * 1000),  # milliseconds since epoch
        job_id=job_id if job_id is not None else str(uuid.uuid4()),
    )
60+
61+
3462def howl (
3563 broker : str ,
36- topic : str ,
64+ topic_prefix : str ,
3765 events_per_frame : int ,
3866 frames_per_second : int ,
67+ frames_per_run : int ,
3968 tof_peak : float ,
4069 tof_sigma : float ,
4170 det_min : int ,
4271 det_max : int ,
4372) -> None :
4473 """
45- Prints the broker and topic metadata for a given broker.
46- If a topic is given, only this topic's partitions and watermarks will be printed.
47- :param broker: The broker address including port number.
48- :param topic: Optional topic to filter information to.
74+ Send messages vaguely resembling a run to Kafka.
4975 """
5076
5177 producer = Producer (
5278 {
5379 "bootstrap.servers" : broker ,
80+ "queue.buffering.max.messages" : 100000 ,
81+ "queue.buffering.max.ms" : 20 ,
5482 }
5583 )
5684
5785 target_frame_time = 1 / frames_per_second
5886
59- msg_id = 0
87+ frames = 0
6088
61- ev44_size = len (generate_fake_ev44 (0 , events_per_frame , tof_peak , tof_sigma , det_min , det_max ))
89+ ev44_size = len (
90+ generate_fake_events (0 , events_per_frame , tof_peak , tof_sigma , det_min , det_max )
91+ )
6292 rate_bytes_per_sec = ev44_size * frames_per_second
6393 rate_mbit_per_sec = (rate_bytes_per_sec / 1024 ** 2 ) * 8
64- logger .info (f"Attempting to simulate data rate: { rate_mbit_per_sec :.3f} MBit/s" )
94+ logger .info (f"Attempting to simulate data rate: { rate_mbit_per_sec :.3f} Mbit/s" )
95+ logger .info (f"Each ev44 is { ev44_size } bytes" )
96+
97+ producer .produce (
98+ topic = f"{ topic_prefix } _runInfo" ,
99+ key = None ,
100+ value = generate_run_start (det_max ),
101+ )
65102
66103 while True :
67104 start_time = time .time ()
68105 target_end_time = start_time + target_frame_time
69106
70107 producer .produce (
71- topic = topic ,
108+ topic = f" { topic_prefix } _events" ,
72109 key = None ,
73- value = generate_fake_ev44 (
74- msg_id , events_per_frame , tof_peak , tof_sigma , det_min , det_max
110+ value = generate_fake_events (
111+ frames , events_per_frame , tof_peak , tof_sigma , det_min , det_max
75112 ),
76113 )
77- msg_id += 1
114+ producer .poll (0 )
115+ frames += 1
116+
117+ if frames_per_run != 0 and frames % frames_per_run == 0 :
118+ logger .info (f"Starting new run after { frames_per_run } simulated frames" )
119+ producer .produce (
120+ topic = f"{ topic_prefix } _runInfo" ,
121+ key = None ,
122+ value = generate_run_stop (),
123+ )
124+ producer .produce (
125+ topic = f"{ topic_prefix } _runInfo" ,
126+ key = None ,
127+ value = generate_run_start (det_max ),
128+ )
78129
79130 sleep_time = max (target_end_time - time .time (), 0 )
80131 if sleep_time == 0 :
0 commit comments