IGWN alerts#
We set up our credentials again and define the GCN Kafka consumer.
from credentials import load_credentials, get_credential, list_credentials

# Load credentials from credentials.txt
load_credentials()

# Check what's configured
print("Configured credentials:")
for key, status in list_credentials().items():
    print(f" {key}: {status}")

# Access specific credentials when needed
try:
    gcn_client_id = get_credential('GCN_CLIENT_ID')
    gcn_client_secret = get_credential('GCN_CLIENT_SECRET')
    print(f"\n✅ GCN Client ID: {gcn_client_id[:10]}...")
except ValueError as e:
    print(f"\n⚠️ {e}")
📖 Loading credentials from: /Users/samueleronchini/Desktop/acme_tutorials/alerts/credentials.txt
✅ Loaded 17 credentials
Configured credentials:
GCN_CLIENT_ID: ✅ Configured
GCN_CLIENT_SECRET: ✅ Configured
FINK_USERNAME: ⚠️ Empty
FINK_GROUP_ID: ⚠️ Empty
FINK_SERVERS: ✅ Configured
SLACK_WEBHOOK: ✅ Configured
SLACK_CHANNEL_ID: ✅ Configured
EMAIL_SENDER: ✅ Configured
EMAIL_PASSWORD: ✅ Configured
EMAIL_SMTP_SERVER: ✅ Configured
EMAIL_SMTP_PORT: ✅ Configured
EMAIL_RECIPIENT: ✅ Configured
TELEGRAM_BOT_TOKEN: ✅ Configured
GRACEDB_USERNAME: ⚠️ Empty
GRACEDB_PASSWORD: ⚠️ Empty
GCN_CLIENT_ID_PROD: ✅ Configured
GCN_CLIENT_SECRET_PROD: ✅ Configured
✅ GCN Client ID: 5h42prl0v1...
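The `credentials` helper used above is a small local module shipped alongside the notebook. If you need to reproduce it, a minimal sketch could look like the following, assuming `credentials.txt` stores one `KEY=VALUE` pair per line; the file format, internal behaviour, and printed messages are assumptions based on the output above, and the real module may differ.
# Minimal sketch of a possible credentials.py helper (assumed format:
# one KEY=VALUE pair per line in credentials.txt)
import os

_CREDENTIALS = {}

def load_credentials(path="credentials.txt"):
    """Read KEY=VALUE pairs from the credentials file into memory."""
    print(f"📖 Loading credentials from: {os.path.abspath(path)}")
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue
            key, _, value = line.partition("=")
            _CREDENTIALS[key.strip()] = value.strip()
    print(f"✅ Loaded {len(_CREDENTIALS)} credentials")

def list_credentials():
    """Map each key to a human-readable status string."""
    return {key: "✅ Configured" if value else "⚠️ Empty"
            for key, value in _CREDENTIALS.items()}

def get_credential(key):
    """Return a credential value, raising ValueError if missing or empty."""
    value = _CREDENTIALS.get(key, "")
    if not value:
        raise ValueError(f"Credential '{key}' is not configured")
    return value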
from gcn_kafka import Consumer
from confluent_kafka import TopicPartition
import os
import json
import datetime
import email
import xmltodict
CONFIG = {"group.id": "", "auto.offset.reset": "earliest"}
consumer = Consumer(config=CONFIG,
                    client_id=gcn_client_id,
                    client_secret=gcn_client_secret,
                    domain='gcn.nasa.gov')
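For real-time follow-up you would not replay offsets at all, but simply subscribe to the topic and poll indefinitely, handing each payload to the parser defined in the next cell. A minimal live-listening sketch (not executed in this tutorial) could look like this:
# Live-listening sketch (not run here): let gcn_kafka pick its own group.id,
# subscribe to the IGWN alert topic and process messages as they arrive
live_consumer = Consumer(client_id=gcn_client_id,
                         client_secret=gcn_client_secret,
                         domain='gcn.nasa.gov')
live_consumer.subscribe(['igwn.gwalert'])
while True:
    for message in live_consumer.consume(timeout=1):
        if message.error():
            print(message.error())
            continue
        # hand the raw JSON payload to parse_gw_notice (defined below)
        parse_gw_notice(message.value())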
Then we define a function to parse GW notices.
def parse_gw_notice(record):
    record = json.loads(record)
    # print(datetime.datetime.now(), record['superevent_id'], 'received')
    try:
        if record['superevent_id'][0] == 'M':
            # print('Mock event detected:', record['superevent_id'])
            return
        if record['alert_type'] == 'RETRACTION':
            print(record['superevent_id'], 'was retracted')
            return
        if record['event']['significant']:
            # this is a significant event
            print('Significant event detected:', record['superevent_id'])
        if record['alert_type'] == 'EARLYWARNING':
            print(datetime.datetime.now(), record['superevent_id'], 'EARLYWARNING')
        if record.get('external_coinc') is not None:
            print(datetime.datetime.now(), record['superevent_id'], 'RAVEN_ALERT')
        if record['event']['search'] == 'SSM':
            print(datetime.datetime.now(), record['superevent_id'], 'SSM event')
        HasNS = record['event']['properties']['HasNS']
        HasSSM = record['event']['properties']['HasSSM']
        HasMassGap = record['event']['properties']['HasMassGap']
        print('HasNS:', HasNS, 'HasSSM:', HasSSM, 'HasMassGap:', HasMassGap)

        # The FAR is not corrected for the trials factor, so on average
        # we receive more alerts than 1/month above this threshold
        if record['event']['group'] == 'Burst':
            far_min = 1/(7*86400)    # 1 per week, in Hz
        else:
            far_min = 1/(30*86400)   # 1 per month, in Hz
        far = record['event']['far']
        if far < far_min:
            if far < 1/(86400*365):
                rate = 1/far/(86400*365)   # e.g. 1 per x years
                s3 = 'FAR=1/%s years' % round(rate, 2)
            elif far < 1/(86400*30):
                rate = 1/far/(86400*30)    # e.g. 1 per x months
                s3 = 'FAR=1/%s months' % round(rate, 2)
            else:
                rate = 1/far/86400         # e.g. 1 per x days
                s3 = 'FAR=1/%s days' % round(rate, 2)
            print(record['superevent_id'], 'passes FAR cut:', s3)
            if record['event']['group'] == 'CBC':
                if record['event']['search'] == 'SSM':
                    print('SSM event detected, skipping p_astro classification')
                    P_HasNS = round(record['event']['properties']['HasNS'], 2)
                    P_HasMassGap = round(record['event']['properties']['HasMassGap'], 2)
                    P_HasSSM = round(record['event']['properties']['HasSSM'], 2)
                    print('P(HasNS):', P_HasNS, 'P(HasMassGap):', P_HasMassGap, 'P(HasSSM):', P_HasSSM)
                else:
                    BNS = round(record['event']['classification']['BNS'], 2)
                    NSBH = round(record['event']['classification']['NSBH'], 2)
                    BBH = round(record['event']['classification']['BBH'], 2)
                    terr = round(record['event']['classification']['Terrestrial'], 2)
                    print('Classifications - P(BNS):', BNS, 'P(NSBH):', NSBH, 'P(BBH):', BBH, 'P(Terr):', terr)
                    P_HasNS = round(record['event']['properties']['HasNS'], 2)
                    P_HasRemnant = round(record['event']['properties']['HasRemnant'], 2)
                    P_HasMassGap = round(record['event']['properties']['HasMassGap'], 2)
                    print('P(HasNS):', P_HasNS, 'P(HasRemnant):', P_HasRemnant, 'P(HasMassGap):', P_HasMassGap)
            else:
                print(record['superevent_id'], 'passes FAR cut but is not a CBC event (e.g. Burst)')
        else:
            print(record['superevent_id'], 'does not pass FAR cut')
    except Exception as e:
        print("An error occurred while processing the record:", str(e))
    return
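Before touching Kafka we can exercise the parser on a synthetic record. The values below are purely illustrative (they do not correspond to any real superevent), but they follow the fields the function reads:
# Synthetic IGWN-style record with illustrative values only,
# used to test parse_gw_notice without a Kafka connection
fake_record = json.dumps({
    "superevent_id": "S990101ab",   # hypothetical superevent ID
    "alert_type": "PRELIMINARY",
    "event": {
        "significant": True,
        "search": "AllSky",
        "group": "CBC",
        "far": 1.0e-9,              # Hz, roughly 1 per 32 years
        "properties": {"HasNS": 0.9, "HasRemnant": 0.7,
                       "HasMassGap": 0.1, "HasSSM": 0.0},
        "classification": {"BNS": 0.8, "NSBH": 0.1,
                           "BBH": 0.05, "Terrestrial": 0.05},
    },
})
parse_gw_notice(fake_record)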
Let’s check past notices#
We are going to test the parser on the superevent S251112cm by replaying archived notices from the Kafka stream.
import datetime

# Replay alerts from the igwn.gwalert topic between two timestamps
topics = 'igwn.gwalert'

timestamp1 = int(datetime.datetime(2025, 11, 12, 15, 31, 1, 501529, tzinfo=datetime.timezone.utc).timestamp() * 1000)
timestamp2 = int(timestamp1 + 0.5*86400000)  # +12 hours

start = consumer.offsets_for_times(
    [TopicPartition(topics, 0, timestamp1)])
end = consumer.offsets_for_times(
    [TopicPartition(topics, 0, timestamp2)])

consumer.assign(start)

# Number of messages between the two offsets
num_messages = end[0].offset - start[0].offset

for message in consumer.consume(abs(num_messages), timeout=2):
    if message.error():
        print(message.error())
        continue
    try:
        # print(f'topic={message.topic()}, offset={message.offset(), json.loads(message.value())["event"]["time"]}')
        parse_gw_notice(message.value())
    except Exception as e:
        print(f"Error parsing message: {e}")
Significant event detected: S251112cm
2025-11-19 15:18:48.625767 S251112cm SSM event
HasNS: 0.08 HasSSM: 1.0 HasMassGap: 0.0
S251112cm passes FAR cut: FAR=1/6.21 years
SSM event detected, skipping p_astro classification
P(HasNS): 0.08 P(HasMassGap): 0.0 P(HasSSM): 1.0
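One caveat when replaying: `offsets_for_times` returns an offset of -1 for a partition when the requested timestamp is newer than the last available message, so a more defensive script might check for that before computing the message count, and close the consumer once it is done. A possible sketch:
# Defensive replay bookkeeping (sketch): guard against offsets_for_times()
# returning -1 (no message at or after the timestamp) and release the
# consumer when finished
start_offset = start[0].offset
end_offset = end[0].offset
if start_offset < 0:
    print("No messages found at or after the start timestamp")
elif end_offset < 0:
    print("End timestamp is newer than the latest message; consume to the end")
else:
    print(f"{end_offset - start_offset} messages in the requested window")
consumer.close()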