diff --git a/.gitignore b/.gitignore
index 919c218a..bfcadab3 100755
--- a/.gitignore
+++ b/.gitignore
@@ -31,6 +31,6 @@ lib/__pycache__/milenage.cpython-36.pyc
pyhss.egg-info
!tests/config.yaml
-tests/.pyhss.db
+tests/.pyhss*.db
.venv
hss.db
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 693ee839..eb9fe7c2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -63,6 +63,91 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- SQN Resync now propogates via Geored when enabled
- Renamed sh_profile to xcap_profile in ims_subscriber
- Rebuilt keys using unique namespace for redis-sentinel / stateless compatibility.
+- The database schema was changed as follows. If you have a PyHSS database
+ created with version 1.0.0 that you would like to use with 1.0.1 or newer,
+ apply these changes manually. Newer versions of PyHSS have automatic database
+ migrations.
+
+
+```diff
+--- a/release_1.0.0.sql
++++ b/release_1.0.1.sql
+@@ -13,6 +13,12 @@ CREATE TABLE apn (
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ charging_rule_list VARCHAR(18),
++ nbiot BOOLEAN,
++ nidd_scef_id VARCHAR(512),
++ nidd_scef_realm VARCHAR(512),
++ nidd_mechanism INTEGER,
++ nidd_rds INTEGER,
++ nidd_preferred_data_mode INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (apn_id)
+ );
+@@ -80,22 +86,40 @@ CREATE TABLE eir_history (
+ PRIMARY KEY (imsi_imei_history_id),
+ UNIQUE (imsi_imei)
+ );
+ CREATE TABLE ims_subscriber (
+ ims_subscriber_id INTEGER NOT NULL,
+ msisdn VARCHAR(18),
+ msisdn_list VARCHAR(1200),
+ imsi VARCHAR(18),
+- ifc_path VARCHAR(18),
++ ifc_path VARCHAR(512),
+ pcscf VARCHAR(512),
+ pcscf_realm VARCHAR(512),
+ pcscf_active_session VARCHAR(512),
+ pcscf_timestamp DATETIME,
+ pcscf_peer VARCHAR(512),
++ xcap_profile TEXT(12000),
+ sh_profile TEXT(12000),
+ scscf VARCHAR(512),
+ scscf_timestamp DATETIME,
+ scscf_realm VARCHAR(512),
+ scscf_peer VARCHAR(512),
++ sh_template_path VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (ims_subscriber_id),
+ UNIQUE (msisdn)
+@@ -115,6 +139,9 @@ CREATE TABLE operation_log (
+ auc_id INTEGER,
+ subscriber_id INTEGER,
+ ims_subscriber_id INTEGER,
++ roaming_rule_id INTEGER,
++ roaming_network_id INTEGER,
++ emergency_subscriber_id INTEGER,
+ charging_rule_id INTEGER,
+ tft_id INTEGER,
+ eir_id INTEGER,
+@@ -127,12 +154,33 @@ CREATE TABLE operation_log (
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id),
+ FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id),
++ FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id),
++ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id),
++ FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id),
+ FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id),
+ FOREIGN KEY(tft_id) REFERENCES tft (tft_id),
+ FOREIGN KEY(eir_id) REFERENCES eir (eir_id),
+ FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id),
+ FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id)
+ );
+ CREATE TABLE serving_apn (
+ serving_apn_id INTEGER NOT NULL,
+ subscriber_id INTEGER,
+@@ -160,6 +208,8 @@ CREATE TABLE subscriber (
+ ue_ambr_dl INTEGER,
+ ue_ambr_ul INTEGER,
+ nam INTEGER,
++ roaming_enabled BOOLEAN,
++ roaming_rule_list VARCHAR(512),
+ subscribed_rau_tau_timer INTEGER,
+ serving_mme VARCHAR(512),
+ serving_mme_timestamp DATETIME,
+```
+
### Fixed
diff --git a/lib/SQN.py b/lib/SQN.py
new file mode 100644
index 00000000..d3323da6
--- /dev/null
+++ b/lib/SQN.py
@@ -0,0 +1,39 @@
+# Copyright 2025 sysmocom - s.f.m.c. GmbH
+# SPDX-License-Identifier: AGPL-3.0-or-later
+from typing import Optional
+
+
+class SQN:
+ def __init__(self, seq: int, ind: int, ind_bitlen: Optional[int]):
+ # 3GPP TS 33.102 § C.3.2 suggests 5
+ if not ind_bitlen:
+ ind_bitlen = 5
+
+ self.seq = seq
+ self.ind = ind
+ self.ind_bitlen = ind_bitlen
+
+ @classmethod
+ def from_sqn(cls, sqn: int, ind_bitlen: Optional[int]):
+ # 3GPP TS 33.102 § C.3.2 suggests 5
+ if not ind_bitlen:
+ ind_bitlen = 5
+
+ seq = sqn >> ind_bitlen
+
+ bitmask = (1 << ind_bitlen) - 1
+ ind = sqn & bitmask
+
+ return SQN(seq, ind, ind_bitlen)
+
+ def get(self):
+ # FIXME: modulo for seq by max seq len
+
+ # Modulo by ind_bitlen value range
+ ind = self.ind
+ ind &= (1 << self.ind_bitlen) - 1
+
+ return (self.seq << self.ind_bitlen) + ind
+
+ def __repr__(self):
+ return f"SQN={self.get()}(SEQ={self.seq},IND={self.ind})"
diff --git a/lib/database.py b/lib/database.py
index f810e2d7..29793dad 100755
--- a/lib/database.py
+++ b/lib/database.py
@@ -5,11 +5,10 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
from typing import Optional
+import sqlalchemy
from sqlalchemy import Column, Integer, String, MetaData, Table, Boolean, ForeignKey, select, UniqueConstraint, DateTime, BigInteger, Text, DateTime, Float
-from sqlalchemy import create_engine, inspect
-from sqlalchemy.engine.reflection import Inspector
+from sqlalchemy import create_engine
from sqlalchemy.sql import desc, func
-from sqlalchemy_utils import database_exists, create_database
from sqlalchemy.orm import sessionmaker, relationship, Session, class_mapper
from sqlalchemy.orm.attributes import History, get_history
from sqlalchemy.orm import declarative_base
@@ -22,6 +21,7 @@
import socket
import pprint
import S6a_crypt
+from databaseSchema import DatabaseSchema
from baseModels import SubscriberInfo, LocationInfo2G
from gsup.protocol.ipa_peer import IPAPeerRole
from messaging import RedisMessaging
@@ -29,9 +29,17 @@
import socket
import traceback
from pyhss_config import config
+from SQN import SQN
Base = declarative_base()
+
+class DATABASE_SCHEMA_VERSION(Base):
+ __tablename__ = "database_schema_version"
+ upgrade_id = Column(Integer, primary_key=True, doc="Schema version")
+ comment = Column(String(512), doc="Notes about this version upgrade")
+ date = Column(DateTime(timezone=True), server_default=sqlalchemy.sql.func.now(), doc="When the upgrade was done")
+
class APN(Base):
__tablename__ = 'apn'
apn_id = Column(Integer, primary_key=True, doc='Unique ID of APN')
@@ -62,7 +70,8 @@ class AUC(Base):
ki = Column(String(32), doc='SIM Key - Authentication Key - Ki', nullable=False)
opc = Column(String(32), doc='SIM Key - Network Operators key OPc', nullable=False)
amf = Column(String(4), doc='Authentication Management Field', nullable=False)
- sqn = Column(BigInteger, doc='Authentication sequence number')
+ sqn = Column(BigInteger, doc='Authentication sequence number (deprecated)')
+ sqn_ind_bitlen = Column(Integer, default=None, doc="Number of IND bits at lower SQN end (default is 5 if set to None)")
iccid = Column(String(20), unique=True, doc='Integrated Circuit Card Identification Number')
imsi = Column(String(18), unique=True, doc='International Mobile Subscriber Identity')
batch_name = Column(String(20), doc='Name of SIM Batch')
@@ -283,6 +292,25 @@ class OPERATION_LOG_BASE(Base):
table_name = Column('table_name', String(255))
__mapper_args__ = {'polymorphic_on': table_name}
+class AUTH_SQN_IND(Base):
+ __tablename__ = "auth_sqn_ind"
+ ind_id = Column(Integer, primary_key=True)
+ client_name = Column(String(255), unique=True, nullable=False)
+
+class AUTH_SQN_SEQ(Base):
+ __tablename__ = "auth_sqn_seq"
+ __table_args__ = (
+ UniqueConstraint("auc_id", "ind_id"),
+ )
+ seq_id = Column(Integer, primary_key=True)
+ auc_id = Column(Integer, ForeignKey("auc.auc_id"))
+ ind_id = Column(Integer, ForeignKey("auth_sqn_ind.ind_id"))
+ seq = Column(BigInteger)
+
+ def __init__(self, auc_id, ind_id):
+ self.auc_id = auc_id
+ self.ind_id = ind_id
+
class APN_OPERATION_LOG(OPERATION_LOG_BASE):
__mapper_args__ = {'polymorphic_identity': 'apn'}
apn = relationship("APN", back_populates="operation_logs")
@@ -356,7 +384,7 @@ class SUBSCRIBER_ATTRIBUTES_OPERATION_LOG(OPERATION_LOG_BASE):
class Database:
- def __init__(self, logTool, redisMessaging=None):
+ def __init__(self, logTool, redisMessaging=None, main_service: bool = False):
self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False)
self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock')
@@ -395,13 +423,7 @@ def __init__(self, logTool, redisMessaging=None):
pool_size=config['logging'].get('sqlalchemy_pool_size', 30),
max_overflow=config['logging'].get('sqlalchemy_max_overflow', 0))
- # Create database if it does not exist.
- if not database_exists(self.engine.url):
- self.logTool.log(service='Database', level='debug', message="Creating database", redisClient=self.redisMessaging)
- create_database(self.engine.url)
- Base.metadata.create_all(self.engine)
- else:
- self.logTool.log(service='Database', level='debug', message="Database already created", redisClient=self.redisMessaging)
+ DatabaseSchema(self.logTool, Base, self.engine, main_service)
#Load IMEI TAC database into Redis if enabled
if self.tacDatabasePath:
@@ -411,15 +433,6 @@ def __init__(self, logTool, redisMessaging=None):
self.logTool.log(service='Database', level='info', message="Not loading EIR IMEI TAC Database as Redis not enabled or TAC CSV Database not set in config", redisClient=self.redisMessaging)
self.tacData = {}
- # Create individual tables if they do not exist
- inspector = inspect(self.engine)
- for table_name in Base.metadata.tables.keys():
- if table_name not in inspector.get_table_names():
- self.logTool.log(service='Database', level='debug', message=f"Creating table {table_name}", redisClient=self.redisMessaging)
- Base.metadata.tables[table_name].create(bind=self.engine)
- else:
- self.logTool.log(service='Database', level='debug', message=f"Table {table_name} already exists", redisClient=self.redisMessaging)
-
def load_IMEI_database_into_Redis(self):
try:
self.logTool.log(service='Database', level='info', message=f"Reading IMEI TAC database CSV from: {self.tacDatabasePath}", redisClient=self.redisMessaging)
@@ -1588,95 +1601,192 @@ def Get_Served_PCRF_Subscribers(self, get_local_users_only=False):
self.safe_close(session)
return Served_Subs
- def Get_Vectors_AuC(self, auc_id, action, **kwargs):
- self.logTool.log(service='Database', level='debug', message="Getting Vectors for auc_id " + str(auc_id) + " with action " + str(action), redisClient=self.redisMessaging)
+ def Get_AUTH_SQN_IND(self, client_name: str) -> int:
+ with Session(self.engine) as session:
+ result = session.query(AUTH_SQN_IND).filter(AUTH_SQN_IND.client_name == client_name).one_or_none()
+ if result:
+ ind = result.ind_id
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Getting SQN IND for {client_name}: {ind} (previously assigned)",
+ redisClient=self.redisMessaging,
+ )
+ return ind
+
+ new_row = AUTH_SQN_IND(client_name=client_name)
+ session.add(new_row)
+ session.commit()
+ ind = new_row.ind_id
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Getting SQN IND for {client_name}: {ind} (new)",
+ redisClient=self.redisMessaging,
+ )
+ return ind
+
+ def Get_AUTH_SQNs(self, auc_data: dict, ind: int, count: int) -> list[int]:
+ if count < 1:
+ raise RuntimeError("Get_AUTH_SQNs: count must be > 0")
+
+ ret = []
+ auc_id = auc_data["auc_id"]
+ ind_bitlen = auc_data["sqn_ind_bitlen"]
+
+ with Session(self.engine) as session:
+ # Get SEQ from database
+ seq_obj = session.query(AUTH_SQN_SEQ).filter_by(
+ auc_id=auc_id,
+ ind_id=ind,
+ ).one_or_none()
+ seq = seq_obj.seq if seq_obj else 0
+
+ # Generate SQNs with increased SEQ
+ sqn = SQN(seq, ind, ind_bitlen)
+ for i in range(count):
+ sqn.seq += 1
+ ret += [sqn.get()]
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Get_AUTH_SQNs for auc_id={auc_id}: {sqn}",
+ )
+
+ # Update SEQ in database
+ if not seq_obj:
+ seq_obj = AUTH_SQN_SEQ(auc_id, ind)
+ seq_obj.seq = sqn.seq
+ session.merge(seq_obj)
+ session.commit()
+
+ return ret
+
+ def Get_AUTH_SQN(self, auc_data: dict, ind: int) -> int:
+ return self.Get_AUTH_SQNs(auc_data, ind, 1)[0]
+
+ def Get_Vectors_AuC_air(self, auc_id, plmn, ind):
+ self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action air", redisClient=self.redisMessaging)
key_data = self.GetObj(AUC, auc_id)
vector_dict = {}
-
- if action == "air":
- rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn'])
- vector_dict['rand'] = rand
- vector_dict['xres'] = xres
- vector_dict['autn'] = autn
- vector_dict['kasme'] = kasme
-
- #Incriment SQN
- self.Update_AuC(auc_id, sqn=key_data['sqn']+100)
-
- return vector_dict
-
- elif action == "sqn_resync":
- self.logTool.log(service='Database', level='debug', message="Resync SQN", redisClient=self.redisMessaging)
- rand = kwargs['rand']
- sqn, mac_s = S6a_crypt.generate_resync_s6a(key_data['ki'], key_data['opc'], key_data['amf'], kwargs['auts'], rand)
- self.logTool.log(service='Database', level='debug', message="SQN from resync: " + str(sqn) + " SQN in DB is " + str(key_data['sqn']) + "(Difference of " + str(int(sqn) - int(key_data['sqn'])) + ")", redisClient=self.redisMessaging)
- self.Update_AuC(auc_id, sqn=sqn+100)
- return
-
- elif action == "sip_auth":
- rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn'])
- self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging)
- self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging)
- vector_dict['SIP_Authenticate'] = rand + autn
- vector_dict['xres'] = xres
- vector_dict['ck'] = ck
- vector_dict['ik'] = ik
- self.Update_AuC(auc_id, sqn=key_data['sqn']+100)
- return vector_dict
-
- elif action == "aka":
- rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn'])
- vector_list = []
- self.logTool.log(service='Database', level='debug', message="Generating " + str(kwargs['requested_vectors']) + " vectors for GSM use", redisClient=self.redisMessaging)
- while kwargs['requested_vectors'] != 0:
- self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging)
- self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging)
-
- vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8")
- vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8")
- vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8")
- vector_dict['ck'] = binascii.hexlify(ck).decode("utf-8")
- vector_dict['ik'] = binascii.hexlify(ik).decode("utf-8")
-
- kwargs['requested_vectors'] = kwargs['requested_vectors'] - 1
- vector_list.append(vector_dict)
- self.Update_AuC(auc_id, sqn=key_data['sqn']+100)
- return vector_list
-
- elif action == "2g3g":
- # Mask first bit of AMF
- key_data['amf'] = '0' + key_data['amf'][1:]
- vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), int(key_data['algo']))
- vector_list = []
- self.logTool.log(service='Database', level='debug', message="Generating " + str(kwargs['requested_vectors']) + " vectors for GSM use", redisClient=self.redisMessaging)
- while kwargs['requested_vectors'] != 0:
- kwargs['requested_vectors'] = kwargs['requested_vectors'] - 1
- vector_list.append(vect)
- self.Update_AuC(auc_id, sqn=key_data['sqn']+100)
- return vector_list
-
- elif action == "eap_aka":
- rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn'])
+
+ sqn = self.Get_AUTH_SQN(key_data, ind)
+ rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn)
+ vector_dict['rand'] = rand
+ vector_dict['xres'] = xres
+ vector_dict['autn'] = autn
+ vector_dict['kasme'] = kasme
+
+ return vector_dict
+
+ def Get_Vectors_AuC_sqn_resync(self, auc_id, auts, rand):
+ self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action sqn_resync")
+ key_data = self.GetObj(AUC, auc_id)
+
+ self.logTool.log(service='Database', level='debug', message="Resync SQN")
+ sqn, mac_s = S6a_crypt.generate_resync_s6a(key_data['ki'], key_data['opc'], key_data['amf'], auts, rand)
+ ind_bitlen = key_data["sqn_ind_bitlen"]
+ sqn_obj = SQN.from_sqn(sqn, ind_bitlen)
+ self.logTool.log(service='Database', level='debug', message=f"New: {sqn_obj}")
+
+ with Session(self.engine) as session:
+ seq_obj = session.query(AUTH_SQN_SEQ).filter_by(
+ auc_id=auc_id,
+ ind_id=sqn_obj.ind,
+ ).one_or_none()
+ if seq_obj:
+ self.logTool.log(service='Database', level='debug', message=f"Old: {SQN(seq_obj.seq, seq_obj.ind_id, ind_bitlen)}")
+ else:
+                seq_obj = AUTH_SQN_SEQ(auc_id, sqn_obj.ind)
+ seq_obj.seq = sqn_obj.seq
+ session.merge(seq_obj)
+ session.commit()
+
+ def Get_Vectors_AuC_sip_auth(self, auc_id, plmn, ind):
+        self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action sip_auth", redisClient=self.redisMessaging)
+ key_data = self.GetObj(AUC, auc_id)
+ vector_dict = {}
+
+ sqn = self.Get_AUTH_SQN(key_data, ind)
+ rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn)
+ self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging)
+ self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging)
+ vector_dict['SIP_Authenticate'] = rand + autn
+ vector_dict['xres'] = xres
+ vector_dict['ck'] = ck
+ vector_dict['ik'] = ik
+
+ return vector_dict
+
+ def Get_Vectors_AuC_aka(self, auc_id, plmn, ind, count):
+ self.logTool.log(service='Database', level='debug', message=f"Getting {count} Vectors for auc_id {auc_id} with action aka", redisClient=self.redisMessaging)
+ key_data = self.GetObj(AUC, auc_id)
+ sqns = self.Get_AUTH_SQNs(key_data, ind, count)
+        vector_list = []
+
+        for i in range(count):
+            vector_dict = {}
+            rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqns[i], plmn)
self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging)
self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging)
+
vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8")
vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8")
vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8")
- vector_dict['mac'] = binascii.hexlify(mac_a).decode("utf-8")
- vector_dict['ak'] = binascii.hexlify(ak).decode("utf-8")
- self.Update_AuC(auc_id, sqn=key_data['sqn']+100)
- return vector_dict
-
- elif action == "Digest-MD5":
- self.logTool.log(service='Database', level='debug', message="Generating Digest-MD5 Auth vectors", redisClient=self.redisMessaging)
- self.logTool.log(service='Database', level='debug', message="key_data: " + str(key_data), redisClient=self.redisMessaging)
- nonce = uuid.uuid4().hex
- #nonce = "beef4d878f2642ed98afe491b943ca60"
- vector_dict['nonce'] = nonce
- vector_dict['SIP_Authenticate'] = key_data['ki']
- return vector_dict
- else:
- self.logTool.log(service='Database', level='error', message="Invalid action: " + str(action), redisClient=self.redisMessaging)
+ vector_dict['ck'] = binascii.hexlify(ck).decode("utf-8")
+ vector_dict['ik'] = binascii.hexlify(ik).decode("utf-8")
+
+ vector_list.append(vector_dict)
+
+ return vector_list
+
+ def Get_Vectors_AuC_2g3g(self, auc_id, ind, count):
+ self.logTool.log(service='Database', level='debug', message=f"Getting {count} Vectors for auc_id {auc_id} with action 2g3g", redisClient=self.redisMessaging)
+ key_data = self.GetObj(AUC, auc_id)
+
+ # Mask first bit of AMF
+ key_data['amf'] = '0' + key_data['amf'][1:]
+
+ algo = int(key_data["algo"]) if key_data["algo"] is not None else 3
+ sqns = self.Get_AUTH_SQNs(key_data, ind, count)
+ vector_list = []
+
+ for i in range(count):
+ vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqns[i], algo)
+ vector_list.append(vect)
+
+ return vector_list
+
+ def Get_Vectors_AuC_eap_aka(self, auc_id, plmn, ind):
+ self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action eap_aka", redisClient=self.redisMessaging)
+ key_data = self.GetObj(AUC, auc_id)
+ sqn = self.Get_AUTH_SQN(key_data, ind)
+ vector_dict = {}
+
+ rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn)
+ self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging)
+ self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging)
+ vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8")
+ vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8")
+ vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8")
+ vector_dict['mac'] = binascii.hexlify(mac_a).decode("utf-8")
+ vector_dict['ak'] = binascii.hexlify(ak).decode("utf-8")
+
+ return vector_dict
+
+ def Get_Vectors_AuC_digest_md5(self, auc_id):
+ self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action Digest-MD5", redisClient=self.redisMessaging)
+ key_data = self.GetObj(AUC, auc_id)
+ vector_dict = {}
+
+ self.logTool.log(service='Database', level='debug', message="Generating Digest-MD5 Auth vectors", redisClient=self.redisMessaging)
+ self.logTool.log(service='Database', level='debug', message="key_data: " + str(key_data), redisClient=self.redisMessaging)
+ nonce = uuid.uuid4().hex
+ #nonce = "beef4d878f2642ed98afe491b943ca60"
+ vector_dict['nonce'] = nonce
+ vector_dict['SIP_Authenticate'] = key_data['ki']
+
+ return vector_dict
def Get_Gsup_SubscriberInfo(self, imsi: str) -> SubscriberInfo:
subscriber = self.Get_Subscriber(imsi=imsi)
diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py
new file mode 100644
index 00000000..c52b2584
--- /dev/null
+++ b/lib/databaseSchema.py
@@ -0,0 +1,215 @@
+# Copyright 2025 sysmocom - s.f.m.c. GmbH
+# SPDX-License-Identifier: AGPL-3.0-or-later
+import sqlalchemy
+import sys
+import time
+from sqlalchemy.engine import Engine
+from sqlalchemy_utils import database_exists, create_database
+
+
+class DatabaseSchema:
+ latest = 2
+
+ def __init__(self, logTool, base, engine: Engine, main_service: bool):
+ self.logTool = logTool
+ self.base = base
+ self.engine = engine
+
+ if not self.is_ready():
+ if main_service:
+ self.init_db()
+ self.init_tables()
+ self.upgrade_all()
+ else:
+ self.wait_until_ready()
+
+ def get_version(self):
+ ret = 0
+ try:
+ sql = """
+ SELECT upgrade_id
+ FROM database_schema_version
+ ORDER BY upgrade_id DESC
+ LIMIT 1
+ """
+ with self.engine.connect() as conn:
+ result = conn.execute(sqlalchemy.text(sql)).fetchone()
+ if result:
+ ret = result[0]
+ except Exception:
+ pass
+ return ret
+
+ def is_ready(self):
+ if not database_exists(self.engine.url):
+ return False
+ return self.get_version() == self.latest
+
+ def wait_until_ready(self):
+ self.logTool.log(
+ service="Database",
+ level="info",
+ message="Waiting for the main service to prepare the database",
+ )
+
+ for i in range(100):
+ time.sleep(0.2)
+ if self.is_ready():
+ return
+
+ self.logTool.log(
+ service="Database",
+ level="error",
+ message="Database did not get ready. Is pyhss_hss (hssService) running?",
+ )
+ sys.exit(10)
+
+ def ensure_release_1_0_1_or_newer(self):
+ expected = {
+ "apn": [
+ "nbiot",
+ "nidd_scef_id",
+ "nidd_scef_realm",
+ "nidd_mechanism",
+ "nidd_rds",
+ "nidd_preferred_data_mode",
+ ],
+ "ims_subscriber": [
+ "xcap_profile",
+ "sh_template_path",
+ ],
+ "operation_log": [
+ "roaming_rule_id",
+ "roaming_network_id",
+ "emergency_subscriber_id",
+ ],
+ "subscriber": [
+ "roaming_enabled",
+ "roaming_rule_list",
+ ],
+ }
+
+ for table, columns in expected.items():
+ for column in columns:
+ if not self.column_exists(table, column):
+ self.logTool.log(
+ service="Database",
+ level="warning",
+ message=f"Database column missing: {table}.{column}",
+ )
+ self.logTool.log(
+ service="Database",
+ level="error",
+ message="Database schemas from before PyHSS 1.0.1 are not supported."
+ " Start with a new database or migrate manually:"
+ " https://github.com/nickvsnetworking/pyhss/blob/master/CHANGELOG.md#101---2024-01-23",
+ )
+ sys.exit(20)
+
+ def init_db(self):
+ # Create database if it does not exist
+ if not database_exists(self.engine.url):
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message="Creating database",
+ )
+ create_database(self.engine.url)
+ self.base.metadata.create_all(self.engine)
+ else:
+ version = self.get_version()
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Database already created (schema version: {version})",
+ )
+ if version > self.latest:
+ self.logTool.log(
+ service="Database",
+ level="warning",
+ message=f"Database schema version {version} is higher than latest known version {self.latest}",
+ )
+ else:
+ self.ensure_release_1_0_1_or_newer()
+
+ def init_tables(self):
+ # Create individual tables if they do not exist
+ inspector = sqlalchemy.inspect(self.engine)
+ for table_name in self.base.metadata.tables.keys():
+ if table_name not in inspector.get_table_names():
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Creating table {table_name}",
+ )
+ self.base.metadata.tables[table_name].create(bind=self.engine)
+ else:
+ self.logTool.log(
+ service="Database",
+ level="debug",
+ message=f"Table {table_name} already exists",
+ )
+
+ def execute(self, sql):
+ with self.engine.connect() as conn:
+ conn.execute(sqlalchemy.text(sql))
+ conn.commit()
+
+ def upgrade_msg(self, new_version):
+ self.logTool.log(
+ service="Database",
+ level="info",
+ message=f"Upgrading database schema to version {new_version}",
+ )
+
+ def set_version(self, new_version):
+ self.execute(f"""
+ INSERT INTO database_schema_version (upgrade_id, comment)
+            VALUES ({int(new_version)}, 'automatic upgrade from PyHSS')
+ """)
+
+ def column_exists(self, table, column):
+ inspector = sqlalchemy.inspect(self.engine)
+ columns = inspector.get_columns(table)
+
+ for col in columns:
+ if col["name"] == column:
+ return True
+
+ return False
+
+ def add_column(self, table, column, type):
+ if self.column_exists(table, column):
+ return
+ self.execute(f"ALTER TABLE {table} ADD {column} {type}")
+
+ def upgrade_from_20240603_release_1_0_1(self):
+ if self.get_version() >= 1:
+ return
+ self.upgrade_msg(1)
+ self.add_column("auc", "algo", "VARCHAR(20)")
+ self.add_column("subscriber", "last_location_update_timestamp", "DATETIME")
+ self.add_column("subscriber", "last_seen_cell_id", "VARCHAR(64)")
+ self.add_column("subscriber", "last_seen_eci", "VARCHAR(64)")
+ self.add_column("subscriber", "last_seen_enodeb_id", "VARCHAR(64)")
+ self.add_column("subscriber", "last_seen_mcc", "VARCHAR(3)")
+ self.add_column("subscriber", "last_seen_mnc", "VARCHAR(3)")
+ self.add_column("subscriber", "last_seen_tac", "VARCHAR(64)")
+ self.add_column("subscriber", "serving_msc", "VARCHAR(512)")
+ self.add_column("subscriber", "serving_msc_timestamp", "DATETIME")
+ self.add_column("subscriber", "serving_sgsn", "VARCHAR(512)")
+ self.add_column("subscriber", "serving_sgsn_timestamp", "DATETIME")
+ self.add_column("subscriber", "serving_vlr", "VARCHAR(512)")
+ self.add_column("subscriber", "serving_vlr_timestamp", "DATETIME")
+ self.set_version(1)
+
+ def upgrade_to_v2(self):
+ if self.get_version() >= 2:
+ return
+ self.upgrade_msg(2)
+ self.add_column("auc", "sqn_ind_bitlen", "INTEGER")
+ self.set_version(2)
+
+ def upgrade_all(self):
+ self.upgrade_from_20240603_release_1_0_1()
+ self.upgrade_to_v2()
diff --git a/lib/diameter.py b/lib/diameter.py
index f95ab3e0..266ded8f 100755
--- a/lib/diameter.py
+++ b/lib/diameter.py
@@ -29,7 +29,17 @@
class Diameter:
- def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc999.mcc999.3gppnetwork.org", productName: str="PyHSS", mcc: str="999", mnc: str="999", redisMessaging=None):
+ def __init__(
+ self,
+ logTool,
+ originHost: str = "hss01",
+ originRealm: str = "epc.mnc999.mcc999.3gppnetwork.org",
+ productName: str = "PyHSS",
+ mcc: str = "999",
+ mnc: str = "999",
+ redisMessaging=None,
+ main_service: bool = False,
+ ):
self.OriginHost = self.string_to_hex(originHost)
self.OriginRealm = self.string_to_hex(originRealm)
self.ProductName = self.string_to_hex(productName)
@@ -49,7 +59,7 @@ def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc99
self.hostname = socket.gethostname()
- self.database = Database(logTool=logTool)
+ self.database = Database(logTool=logTool, main_service=main_service)
self.diameterRequestTimeout = int(config.get('hss', {}).get('diameter_request_timeout', 10))
self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers')
self.useDraFallback = config.get('hss', {}).get('use_dra_fallback', False)
@@ -710,6 +720,20 @@ def get_avp_data(self, avps, avp_code):
misc_data.append(sub_avp['misc_data'])
return misc_data
+ def get_auth_sqn_ind_from_avp_data(self, avps):
+ try:
+ OriginHost = self.get_avp_data(avps, 264)[0]
+ OriginRealm = self.get_avp_data(avps, 296)[0]
+            return self.database.Get_AUTH_SQN_IND(f"diameter:host={OriginHost},realm={OriginRealm}")
+        except Exception:
+ self.logTool.log(
+ service='HSS',
+ level='error',
+ message=f"Failed to get SQN IND: {traceback.format_exc()}",
+ redisClient=self.redisMessaging
+ )
+ return 0
+
def decode_diameter_packet_length(self, data):
packet_vars = {}
data = data.hex()
@@ -2298,6 +2322,7 @@ def Answer_16777251_318(self, packet_vars, avps):
requested_vectors = 1
EUTRAN_Authentication_Info = self.get_avp_data(avps, 1408)
self.logTool.log(service='HSS', level='debug', message=f"authInfo: {EUTRAN_Authentication_Info}", redisClient=self.redisMessaging)
+ ind = self.get_auth_sqn_ind_from_avp_data(avps)
if len(EUTRAN_Authentication_Info) > 0:
EUTRAN_Authentication_Info = EUTRAN_Authentication_Info[0]
self.logTool.log(service='HSS', level='debug', message="AVP: Requested-EUTRAN-Authentication-Info(1408) l=44 f=VM- vnd=TGPP", redisClient=self.redisMessaging)
@@ -2323,7 +2348,7 @@ def Answer_16777251_318(self, packet_vars, avps):
rand = str(sub_avp['misc_data'])[:32]
rand = binascii.unhexlify(rand)
#Calculate correct SQN
- self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sqn_resync", auts=auts, rand=rand)
+ self.database.Get_Vectors_AuC_sqn_resync(subscriber_details['auc_id'], auts, rand)
#Get number of requested vectors
if sub_avp['avp_code'] == 1410:
@@ -2338,7 +2363,7 @@ def Answer_16777251_318(self, packet_vars, avps):
while requested_vectors != 0:
self.logTool.log(service='HSS', level='debug', message="Generating vector number " + str(requested_vectors), redisClient=self.redisMessaging)
plmn = self.get_avp_data(avps, 1407)[0] #Get PLMN from request
- vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "air", plmn=plmn)
+ vector_dict = self.database.Get_Vectors_AuC_air(subscriber_details['auc_id'], plmn, ind)
eutranvector = '' #This goes into the payload of AVP 10415 (Authentication info)
eutranvector += self.generate_vendor_avp(1419, "c0", 10415, self.int_to_hex(requested_vectors, 4))
eutranvector += self.generate_vendor_avp(1447, "c0", 10415, vector_dict['rand']) #And is made up of other AVPs joined together with RAND
@@ -3185,6 +3210,7 @@ def Answer_16777216_303(self, packet_vars, avps):
mcc, mnc = imsi[0:3], imsi[3:5]
plmn = self.EncodePLMN(mcc, mnc)
+ ind = self.get_auth_sqn_ind_from_avp_data(avps)
#Determine if SQN Resync is required & auth type to use
for sub_avp_612 in self.get_avp_data(avps, 612)[0]:
@@ -3193,7 +3219,7 @@ def Answer_16777216_303(self, packet_vars, avps):
auts = str(sub_avp_612['misc_data'])[32:]
rand = str(sub_avp_612['misc_data'])[:32]
rand = binascii.unhexlify(rand)
- self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sqn_resync", auts=auts, rand=rand)
+ self.database.Get_Vectors_AuC_sqn_resync(subscriber_details['auc_id'], auts, rand)
self.logTool.log(service='HSS', level='debug', message="Resynced SQN in DB", redisClient=self.redisMessaging)
self.redisMessaging.sendMetric(serviceName='diameter', metricName='prom_diam_auth_event_count',
metricType='counter', metricAction='inc',
@@ -3222,7 +3248,7 @@ def Answer_16777216_303(self, packet_vars, avps):
#Determine Vectors to Generate
if auth_scheme == "Digest-MD5":
self.logTool.log(service='HSS', level='debug', message="Generating MD5 Challenge", redisClient=self.redisMessaging)
- vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "Digest-MD5", username=imsi, plmn=plmn)
+ vector_dict = self.database.Get_Vectors_AuC_digest_md5(subscriber_details['auc_id'])
avp_SIP_Item_Number = self.generate_vendor_avp(613, "c0", 10415, format(int(0),"x").zfill(8))
avp_SIP_Authentication_Scheme = self.generate_vendor_avp(608, "c0", 10415, str(binascii.hexlify(b'Digest-MD5'),'ascii'))
#Nonce
@@ -3232,7 +3258,7 @@ def Answer_16777216_303(self, packet_vars, avps):
auth_data_item = avp_SIP_Item_Number + avp_SIP_Authentication_Scheme + avp_SIP_Authenticate + avp_SIP_Authorization
else:
self.logTool.log(service='HSS', level='debug', message="Generating AKA-MD5 Auth Challenge", redisClient=self.redisMessaging)
- vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sip_auth", plmn=plmn)
+ vector_dict = self.database.Get_Vectors_AuC_sip_auth(subscriber_details['auc_id'], plmn, ind)
#diameter.3GPP-SIP-Auth-Data-Items:
diff --git a/lib/gsup/controller/air.py b/lib/gsup/controller/air.py
index a2b24ea5..3c505e53 100644
--- a/lib/gsup/controller/air.py
+++ b/lib/gsup/controller/air.py
@@ -59,22 +59,22 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage):
subscriber = self._database.Get_Subscriber(imsi=imsi)
rand = GsupMessageUtil.get_first_ie_by_name('rand', request_dict)
auts = GsupMessageUtil.get_first_ie_by_name('auts', request_dict)
+ client_name = f"gsup:sernr={peer.tags['SERNR']}".rstrip("\x00")
+ ind = self._database.Get_AUTH_SQN_IND(client_name)
resync_required = rand is not None and auts is not None
if resync_required:
- self._database.Get_Vectors_AuC(subscriber['auc_id'], 'sqn_resync', rand=rand, auts=auts.hex())
-
- # Use request_vectors=1 as Get_Vectors_AuC currently doesn't
- # increment SEQ for each requested vector:
- # https://github.com/nickvsnetworking/pyhss/issues/266
- vectors = []
- for i in range(self.get_num_vectors_req(request_dict)):
- vectors += self._database.Get_Vectors_AuC(subscriber['auc_id'], '2g3g', requested_vectors=1)
+ self._database.Get_Vectors_AuC_sqn_resync(subscriber['auc_id'], auts.hex(), rand)
response_msg = ((GsupMessageBuilder()
.with_msg_type(MsgType.SEND_AUTH_INFO_RESULT))
.with_ie('imsi', imsi))
+ vectors = self._database.Get_Vectors_AuC_2g3g(
+ subscriber['auc_id'],
+ ind,
+ self.get_num_vectors_req(request_dict),
+ )
for vector in vectors:
response_msg.with_ie('auth_tuple', [vector], False)
@@ -91,7 +91,7 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage):
.build(),
)
except ValueError as e:
- await self._logger.logAsync(service='GSUP', level='WARN', message=f"Subscriber not found: {imsi}")
+ await self._logger.logAsync(service='GSUP', level='WARN', message=f"Subscriber not found: {imsi}, {traceback.format_exc()}")
await self._send_gsup_response(
peer,
GsupMessageBuilder().with_msg_type(MsgType.SEND_AUTH_INFO_ERROR)
diff --git a/lib/gsup/protocol/ipa_peer.py b/lib/gsup/protocol/ipa_peer.py
index 1d71ecb6..8bdaba3a 100644
--- a/lib/gsup/protocol/ipa_peer.py
+++ b/lib/gsup/protocol/ipa_peer.py
@@ -67,5 +67,9 @@ def __init__(self, name: str, tags: dict, reader: StreamReader, writer: StreamWr
raise ValueError(
"Role not found in tags. 'sgsn' or 'msc' must appear in one of there tags: " + ', '.join(
self._ROLE_PREFERENCE_TAGS))
+
+ if "SERNR" not in tags:
+ raise ValueError("Client didn't send tag SERNR")
+
def __str__(self):
- return f"[{self.name} ({self.role.name})]"
\ No newline at end of file
+ return f"[{self.name} ({self.role.name})]"
diff --git a/lib/gsup/protocol/osmocom_ipa.py b/lib/gsup/protocol/osmocom_ipa.py
index 6fc1cd38..1695923e 100644
--- a/lib/gsup/protocol/osmocom_ipa.py
+++ b/lib/gsup/protocol/osmocom_ipa.py
@@ -213,7 +213,7 @@ def req_identity(self, id_tags=None):
"""
encoded = bytearray()
if id_tags is None:
- id_tags = ['UNIT', 'MACADDR', 'TYPE', 'SWVERSION', 'LOCATION', 'UNITNAME']
+ id_tags = ['UNIT', 'MACADDR', 'TYPE', 'SWVERSION', 'LOCATION', 'UNITNAME', 'SERNR']
for tag in id_tags:
if tag not in self._IDTAG:
@@ -250,4 +250,4 @@ def id_resp(self, data):
"""
Make ID_RESP CCM message
"""
- return self.add_header(data, self.PROTO['CCM'], self.MSGT['ID_RESP'])
\ No newline at end of file
+ return self.add_header(data, self.PROTO['CCM'], self.MSGT['ID_RESP'])
diff --git a/lib/gsup/server.py b/lib/gsup/server.py
index 0bef453b..9fce6ed8 100644
--- a/lib/gsup/server.py
+++ b/lib/gsup/server.py
@@ -239,4 +239,4 @@ async def _listen_for_subscriber_updates(self):
except Exception as e:
await self.logger.logAsync(service='GSUP', level='ERROR',
- message=f"Error processing subscriber update: {traceback.format_exc()}")
\ No newline at end of file
+ message=f"Error processing subscriber update: {traceback.format_exc()}")
diff --git a/pyproject.toml b/pyproject.toml
index 594c6544..9f09536e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -41,3 +41,14 @@ filterwarnings = [
# https://github.com/python/cpython/issues/135834
"ignore:The default datetime adapter is deprecated as of Python 3.12"
]
+
+[tool.ruff]
+line-length = 120
+include = [
+ "lib/SQN.py",
+ "lib/databaseSchema.py",
+ "lib/pyhss_config.py",
+ "tests/test_database_upgrade.py",
+ "tests/test_license_headers.py",
+ "tests/test_milenage.py",
+]
diff --git a/services/apiService.py b/services/apiService.py
index 599b3dbb..dad7926b 100755
--- a/services/apiService.py
+++ b/services/apiService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2022-2025 Nick
# Copyright 2023-2025 David Kneipp
# Copyright 2025 sysmocom - s.f.m.c. GmbH
@@ -497,7 +498,7 @@ def get(self, imsi, plmn):
#Get data from AuC
auc_data = databaseClient.Get_AuC(imsi=imsi)
plmn = diameterClient.EncodePLMN(mcc=plmn[0:3], mnc=plmn[3:])
- vector_dict = databaseClient.Get_Vectors_AuC(auc_data['auc_id'], action='eap_aka', plmn=plmn)
+ vector_dict = databaseClient.Get_Vectors_AuC_eap_aka(auc_data['auc_id'], plmn)
return vector_dict, 200
except Exception as E:
print(E)
@@ -511,7 +512,7 @@ def get(self, imsi, vector_count):
#Get data from AuC
auc_data = databaseClient.Get_AuC(imsi=imsi)
plmn = diameterClient.EncodePLMN(mcc=config['hss']['MCC'], mnc=config['hss']['MNC'])
- vector_dict = databaseClient.Get_Vectors_AuC(auc_data['auc_id'], action='aka', plmn=plmn, requested_vectors=int(vector_count))
+ vector_dict = databaseClient.Get_Vectors_AuC_aka(auc_data['auc_id'], plmn, int(vector_count))
return vector_dict, 200
except Exception as E:
print(E)
diff --git a/services/databaseService.py b/services/databaseService.py
old mode 100644
new mode 100755
index 94bb4e2c..2527ea08
--- a/services/databaseService.py
+++ b/services/databaseService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2024 David Kneipp
# Copyright 2025 Victor Seva
# Copyright 2025 sysmocom - s.f.m.c. GmbH
diff --git a/services/diameterService.py b/services/diameterService.py
index 2ba47d1a..90795e26 100755
--- a/services/diameterService.py
+++ b/services/diameterService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2023-2024 David Kneipp
# Copyright 2025 sysmocom - s.f.m.c. GmbH
# SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/services/georedService.py b/services/georedService.py
index 8ee4ce15..35713796 100755
--- a/services/georedService.py
+++ b/services/georedService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2019-2021 Nick
# Copyright 2023-2025 David Kneipp
# SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/services/gsupService.py b/services/gsupService.py
old mode 100644
new mode 100755
index aa64ac61..76b3fe83
--- a/services/gsupService.py
+++ b/services/gsupService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# PyHSS GSUP Service
# Copyright 2025 Lennart Rosam
# Copyright 2025 Alexander Couzens
diff --git a/services/hssService.py b/services/hssService.py
index c5db43d1..2f95279f 100755
--- a/services/hssService.py
+++ b/services/hssService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2023-2025 David Kneipp
# SPDX-License-Identifier: AGPL-3.0-or-later
import os, sys, json, time, traceback, socket
@@ -29,7 +30,15 @@ def __init__(self):
self.originHost = config.get('hss', {}).get('OriginHost', f'hss01')
self.productName = config.get('hss', {}).get('ProductName', f'PyHSS')
self.logTool.log(service='HSS', level='info', message=f"{self.banners.hssService()}", redisClient=self.redisMessaging)
- self.diameterLibrary = Diameter(logTool=self.logTool, originHost=self.originHost, originRealm=self.originRealm, productName=self.productName, mcc=self.mcc, mnc=self.mnc)
+ self.diameterLibrary = Diameter(
+ logTool=self.logTool,
+ originHost=self.originHost,
+ originRealm=self.originRealm,
+ productName=self.productName,
+ mcc=self.mcc,
+ mnc=self.mnc,
+ main_service=True,
+ )
self.benchmarking = config.get('hss').get('enable_benchmarking', False)
self.hostname = self.originHost
self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers')
diff --git a/services/logService.py b/services/logService.py
index e01da65a..cd682120 100755
--- a/services/logService.py
+++ b/services/logService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2023-2024 David Kneipp
# SPDX-License-Identifier: AGPL-3.0-or-later
import os, sys, json, socket
diff --git a/services/metricService.py b/services/metricService.py
index 64112b11..af6a316a 100755
--- a/services/metricService.py
+++ b/services/metricService.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# Copyright 2023-2024 David Kneipp
# SPDX-License-Identifier: AGPL-3.0-or-later
import asyncio
diff --git a/tests/db_schema/20231009_release_1.0.0.sql b/tests/db_schema/20231009_release_1.0.0.sql
new file mode 100644
index 00000000..0c9acc9e
--- /dev/null
+++ b/tests/db_schema/20231009_release_1.0.0.sql
@@ -0,0 +1,203 @@
+BEGIN TRANSACTION;
+CREATE TABLE apn (
+ apn_id INTEGER NOT NULL,
+ apn VARCHAR(50) NOT NULL,
+ ip_version INTEGER,
+ pgw_address VARCHAR(50),
+ sgw_address VARCHAR(50),
+ charging_characteristics VARCHAR(4),
+ apn_ambr_dl INTEGER NOT NULL,
+ apn_ambr_ul INTEGER NOT NULL,
+ qci INTEGER,
+ arp_priority INTEGER,
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ charging_rule_list VARCHAR(18),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (apn_id)
+);
+CREATE TABLE auc (
+ auc_id INTEGER NOT NULL,
+ ki VARCHAR(32) NOT NULL,
+ opc VARCHAR(32) NOT NULL,
+ amf VARCHAR(4) NOT NULL,
+ sqn BIGINT,
+ iccid VARCHAR(20),
+ imsi VARCHAR(18),
+ batch_name VARCHAR(20),
+ sim_vendor VARCHAR(20),
+ esim BOOLEAN,
+ lpa VARCHAR(128),
+ pin1 VARCHAR(20),
+ pin2 VARCHAR(20),
+ puk1 VARCHAR(20),
+ puk2 VARCHAR(20),
+ kid VARCHAR(20),
+ psk VARCHAR(128),
+ des VARCHAR(128),
+ adm1 VARCHAR(20),
+ misc1 VARCHAR(128),
+ misc2 VARCHAR(128),
+ misc3 VARCHAR(128),
+ misc4 VARCHAR(128),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (auc_id),
+ UNIQUE (iccid),
+ UNIQUE (imsi)
+);
+CREATE TABLE charging_rule (
+ charging_rule_id INTEGER NOT NULL,
+ rule_name VARCHAR(20),
+ qci INTEGER,
+ arp_priority INTEGER,
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ mbr_dl INTEGER NOT NULL,
+ mbr_ul INTEGER NOT NULL,
+ gbr_dl INTEGER NOT NULL,
+ gbr_ul INTEGER NOT NULL,
+ tft_group_id INTEGER,
+ precedence INTEGER,
+ rating_group INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (charging_rule_id)
+);
+CREATE TABLE eir (
+ eir_id INTEGER NOT NULL,
+ imei VARCHAR(60),
+ imsi VARCHAR(60),
+ regex_mode INTEGER,
+ match_response_code INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (eir_id)
+);
+CREATE TABLE eir_history (
+ imsi_imei_history_id INTEGER NOT NULL,
+ imsi_imei VARCHAR(60),
+ match_response_code INTEGER,
+ imsi_imei_timestamp DATETIME,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (imsi_imei_history_id),
+ UNIQUE (imsi_imei)
+);
+CREATE TABLE ims_subscriber (
+ ims_subscriber_id INTEGER NOT NULL,
+ msisdn VARCHAR(18),
+ msisdn_list VARCHAR(1200),
+ imsi VARCHAR(18),
+ ifc_path VARCHAR(18),
+ pcscf VARCHAR(512),
+ pcscf_realm VARCHAR(512),
+ pcscf_active_session VARCHAR(512),
+ pcscf_timestamp DATETIME,
+ pcscf_peer VARCHAR(512),
+ sh_profile TEXT(12000),
+ scscf VARCHAR(512),
+ scscf_timestamp DATETIME,
+ scscf_realm VARCHAR(512),
+ scscf_peer VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (ims_subscriber_id),
+ UNIQUE (msisdn)
+);
+CREATE TABLE operation_log (
+ id INTEGER NOT NULL,
+ item_id INTEGER NOT NULL,
+ operation_id VARCHAR(36) NOT NULL,
+ operation VARCHAR(10),
+ changes TEXT,
+ last_modified VARCHAR(100),
+ timestamp DATETIME,
+ table_name VARCHAR(255),
+ apn_id INTEGER,
+ subscriber_routing_id INTEGER,
+ serving_apn_id INTEGER,
+ auc_id INTEGER,
+ subscriber_id INTEGER,
+ ims_subscriber_id INTEGER,
+ charging_rule_id INTEGER,
+ tft_id INTEGER,
+ eir_id INTEGER,
+ imsi_imei_history_id INTEGER,
+ subscriber_attributes_id INTEGER,
+ PRIMARY KEY (id),
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id),
+ FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id),
+ FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id),
+ FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id),
+ FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id),
+ FOREIGN KEY(tft_id) REFERENCES tft (tft_id),
+ FOREIGN KEY(eir_id) REFERENCES eir (eir_id),
+ FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id),
+ FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id)
+);
+CREATE TABLE serving_apn (
+ serving_apn_id INTEGER NOT NULL,
+ subscriber_id INTEGER,
+ apn INTEGER,
+ pcrf_session_id VARCHAR(100),
+ subscriber_routing VARCHAR(100),
+ ip_version INTEGER,
+ serving_pgw VARCHAR(512),
+ serving_pgw_timestamp DATETIME,
+ serving_pgw_realm VARCHAR(512),
+ serving_pgw_peer VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (serving_apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber (
+ subscriber_id INTEGER NOT NULL,
+ imsi VARCHAR(18),
+ enabled BOOLEAN,
+ auc_id INTEGER NOT NULL,
+ default_apn INTEGER NOT NULL,
+ apn_list VARCHAR(64) NOT NULL,
+ msisdn VARCHAR(18),
+ ue_ambr_dl INTEGER,
+ ue_ambr_ul INTEGER,
+ nam INTEGER,
+ subscribed_rau_tau_timer INTEGER,
+ serving_mme VARCHAR(512),
+ serving_mme_timestamp DATETIME,
+ serving_mme_realm VARCHAR(512),
+ serving_mme_peer VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (subscriber_id),
+ UNIQUE (imsi),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(default_apn) REFERENCES apn (apn_id)
+);
+CREATE TABLE subscriber_attributes (
+ subscriber_attributes_id INTEGER NOT NULL,
+ subscriber_id INTEGER NOT NULL,
+ "key" VARCHAR(60),
+ last_modified VARCHAR(100),
+ value VARCHAR(12000),
+ PRIMARY KEY (subscriber_attributes_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber_routing (
+ subscriber_routing_id INTEGER NOT NULL,
+ subscriber_id INTEGER,
+ apn_id INTEGER,
+ ip_version INTEGER,
+ ip_address VARCHAR(254),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (subscriber_routing_id),
+ UNIQUE (subscriber_id, apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE tft (
+ tft_id INTEGER NOT NULL,
+ tft_group_id INTEGER NOT NULL,
+ tft_string VARCHAR(100) NOT NULL,
+ direction INTEGER NOT NULL,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (tft_id)
+);
+COMMIT;
diff --git a/tests/db_schema/20240125_release_1.0.1.sql b/tests/db_schema/20240125_release_1.0.1.sql
new file mode 100644
index 00000000..c875bd95
--- /dev/null
+++ b/tests/db_schema/20240125_release_1.0.1.sql
@@ -0,0 +1,257 @@
+BEGIN TRANSACTION;
+CREATE TABLE apn (
+ apn_id INTEGER NOT NULL,
+ apn VARCHAR(50) NOT NULL,
+ ip_version INTEGER,
+ pgw_address VARCHAR(50),
+ sgw_address VARCHAR(50),
+ charging_characteristics VARCHAR(4),
+ apn_ambr_dl INTEGER NOT NULL,
+ apn_ambr_ul INTEGER NOT NULL,
+ qci INTEGER,
+ arp_priority INTEGER,
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ charging_rule_list VARCHAR(18),
+ nbiot BOOLEAN,
+ nidd_scef_id VARCHAR(512),
+ nidd_scef_realm VARCHAR(512),
+ nidd_mechanism INTEGER,
+ nidd_rds INTEGER,
+ nidd_preferred_data_mode INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (apn_id)
+);
+CREATE TABLE auc (
+ auc_id INTEGER NOT NULL,
+ ki VARCHAR(32) NOT NULL,
+ opc VARCHAR(32) NOT NULL,
+ amf VARCHAR(4) NOT NULL,
+ sqn BIGINT,
+ iccid VARCHAR(20),
+ imsi VARCHAR(18),
+ batch_name VARCHAR(20),
+ sim_vendor VARCHAR(20),
+ esim BOOLEAN,
+ lpa VARCHAR(128),
+ pin1 VARCHAR(20),
+ pin2 VARCHAR(20),
+ puk1 VARCHAR(20),
+ puk2 VARCHAR(20),
+ kid VARCHAR(20),
+ psk VARCHAR(128),
+ des VARCHAR(128),
+ adm1 VARCHAR(20),
+ misc1 VARCHAR(128),
+ misc2 VARCHAR(128),
+ misc3 VARCHAR(128),
+ misc4 VARCHAR(128),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (auc_id),
+ UNIQUE (iccid),
+ UNIQUE (imsi)
+);
+CREATE TABLE charging_rule (
+ charging_rule_id INTEGER NOT NULL,
+ rule_name VARCHAR(20),
+ qci INTEGER,
+ arp_priority INTEGER,
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ mbr_dl INTEGER NOT NULL,
+ mbr_ul INTEGER NOT NULL,
+ gbr_dl INTEGER NOT NULL,
+ gbr_ul INTEGER NOT NULL,
+ tft_group_id INTEGER,
+ precedence INTEGER,
+ rating_group INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (charging_rule_id)
+);
+CREATE TABLE eir (
+ eir_id INTEGER NOT NULL,
+ imei VARCHAR(60),
+ imsi VARCHAR(60),
+ regex_mode INTEGER,
+ match_response_code INTEGER,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (eir_id)
+);
+CREATE TABLE eir_history (
+ imsi_imei_history_id INTEGER NOT NULL,
+ imsi_imei VARCHAR(60),
+ match_response_code INTEGER,
+ imsi_imei_timestamp DATETIME,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (imsi_imei_history_id),
+ UNIQUE (imsi_imei)
+);
+CREATE TABLE emergency_subscriber (
+ emergency_subscriber_id INTEGER NOT NULL,
+ imsi VARCHAR(18),
+ serving_pgw VARCHAR(512),
+ serving_pgw_timestamp VARCHAR(512),
+ serving_pcscf VARCHAR(512),
+ serving_pcscf_timestamp VARCHAR(512),
+ gx_origin_realm VARCHAR(512),
+ gx_origin_host VARCHAR(512),
+ rat_type VARCHAR(512),
+ ip VARCHAR(512),
+ access_network_gateway_address VARCHAR(512),
+ access_network_charging_address VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (emergency_subscriber_id)
+);
+-- NOTE: xcap_profile and sh_profile might also be TEXT(12000):
+-- https://github.com/nickvsnetworking/pyhss/commit/7d66298698d92176be1fef212de409a3ecfcdaf6
+-- But migrating this with sqlite is not trivial (no ALTER TABLE MODIFY). In
+-- practice it won't matter so let's just assume TEXT here.
+CREATE TABLE ims_subscriber (
+ ims_subscriber_id INTEGER NOT NULL,
+ msisdn VARCHAR(18),
+ msisdn_list VARCHAR(1200),
+ imsi VARCHAR(18),
+ ifc_path VARCHAR(512),
+ pcscf VARCHAR(512),
+ pcscf_realm VARCHAR(512),
+ pcscf_active_session VARCHAR(512),
+ pcscf_timestamp DATETIME,
+ pcscf_peer VARCHAR(512),
+ xcap_profile TEXT,
+ sh_profile TEXT,
+ scscf VARCHAR(512),
+ scscf_timestamp DATETIME,
+ scscf_realm VARCHAR(512),
+ scscf_peer VARCHAR(512),
+ sh_template_path VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (ims_subscriber_id),
+ UNIQUE (msisdn)
+);
+CREATE TABLE operation_log (
+ id INTEGER NOT NULL,
+ item_id INTEGER NOT NULL,
+ operation_id VARCHAR(36) NOT NULL,
+ operation VARCHAR(10),
+ changes TEXT,
+ last_modified VARCHAR(100),
+ timestamp DATETIME,
+ table_name VARCHAR(255),
+ apn_id INTEGER,
+ subscriber_routing_id INTEGER,
+ serving_apn_id INTEGER,
+ auc_id INTEGER,
+ subscriber_id INTEGER,
+ ims_subscriber_id INTEGER,
+ roaming_rule_id INTEGER,
+ roaming_network_id INTEGER,
+ emergency_subscriber_id INTEGER,
+ charging_rule_id INTEGER,
+ tft_id INTEGER,
+ eir_id INTEGER,
+ imsi_imei_history_id INTEGER,
+ subscriber_attributes_id INTEGER,
+ PRIMARY KEY (id),
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id),
+ FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id),
+ FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id),
+ FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id),
+ FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id),
+ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id),
+ FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id),
+ FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id),
+ FOREIGN KEY(tft_id) REFERENCES tft (tft_id),
+ FOREIGN KEY(eir_id) REFERENCES eir (eir_id),
+ FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id),
+ FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id)
+);
+CREATE TABLE roaming_network (
+ roaming_network_id INTEGER NOT NULL,
+ name VARCHAR(512),
+ preference INTEGER,
+ mcc VARCHAR(100),
+ mnc VARCHAR(100),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (roaming_network_id)
+);
+CREATE TABLE roaming_rule (
+ roaming_rule_id INTEGER NOT NULL,
+ roaming_network_id INTEGER,
+ allow BOOLEAN,
+ enabled BOOLEAN,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (roaming_rule_id),
+ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id) ON DELETE CASCADE
+);
+CREATE TABLE serving_apn (
+ serving_apn_id INTEGER NOT NULL,
+ subscriber_id INTEGER,
+ apn INTEGER,
+ pcrf_session_id VARCHAR(100),
+ subscriber_routing VARCHAR(100),
+ ip_version INTEGER,
+ serving_pgw VARCHAR(512),
+ serving_pgw_timestamp DATETIME,
+ serving_pgw_realm VARCHAR(512),
+ serving_pgw_peer VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (serving_apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber (
+ subscriber_id INTEGER NOT NULL,
+ imsi VARCHAR(18),
+ enabled BOOLEAN,
+ auc_id INTEGER NOT NULL,
+ default_apn INTEGER NOT NULL,
+ apn_list VARCHAR(64) NOT NULL,
+ msisdn VARCHAR(18),
+ ue_ambr_dl INTEGER,
+ ue_ambr_ul INTEGER,
+ nam INTEGER,
+ roaming_enabled BOOLEAN,
+ roaming_rule_list VARCHAR(512),
+ subscribed_rau_tau_timer INTEGER,
+ serving_mme VARCHAR(512),
+ serving_mme_timestamp DATETIME,
+ serving_mme_realm VARCHAR(512),
+ serving_mme_peer VARCHAR(512),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (subscriber_id),
+ UNIQUE (imsi),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(default_apn) REFERENCES apn (apn_id)
+);
+CREATE TABLE subscriber_attributes (
+ subscriber_attributes_id INTEGER NOT NULL,
+ subscriber_id INTEGER NOT NULL,
+ "key" VARCHAR(60),
+ last_modified VARCHAR(100),
+ value VARCHAR(12000),
+ PRIMARY KEY (subscriber_attributes_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber_routing (
+ subscriber_routing_id INTEGER NOT NULL,
+ subscriber_id INTEGER,
+ apn_id INTEGER,
+ ip_version INTEGER,
+ ip_address VARCHAR(254),
+ last_modified VARCHAR(100),
+ PRIMARY KEY (subscriber_routing_id),
+ UNIQUE (subscriber_id, apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE tft (
+ tft_id INTEGER NOT NULL,
+ tft_group_id INTEGER NOT NULL,
+ tft_string VARCHAR(100) NOT NULL,
+ direction INTEGER NOT NULL,
+ last_modified VARCHAR(100),
+ PRIMARY KEY (tft_id)
+);
+COMMIT;
diff --git a/tests/db_schema/latest.sql b/tests/db_schema/latest.sql
new file mode 100644
index 00000000..0eb19d15
--- /dev/null
+++ b/tests/db_schema/latest.sql
@@ -0,0 +1,290 @@
+BEGIN TRANSACTION;
+CREATE TABLE apn (
+ apn VARCHAR(50) NOT NULL,
+ apn_ambr_dl INTEGER NOT NULL,
+ apn_ambr_ul INTEGER NOT NULL,
+ apn_id INTEGER NOT NULL,
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ arp_priority INTEGER,
+ charging_characteristics VARCHAR(4),
+ charging_rule_list VARCHAR(18),
+ ip_version INTEGER,
+ last_modified VARCHAR(100),
+ nbiot BOOLEAN,
+ nidd_mechanism INTEGER,
+ nidd_preferred_data_mode INTEGER,
+ nidd_rds INTEGER,
+ nidd_scef_id VARCHAR(512),
+ nidd_scef_realm VARCHAR(512),
+ pgw_address VARCHAR(50),
+ qci INTEGER,
+ sgw_address VARCHAR(50),
+ PRIMARY KEY (apn_id)
+);
+CREATE TABLE auc (
+ adm1 VARCHAR(20),
+ algo VARCHAR(20),
+ amf VARCHAR(4) NOT NULL,
+ auc_id INTEGER NOT NULL,
+ batch_name VARCHAR(20),
+ des VARCHAR(128),
+ esim BOOLEAN,
+ iccid VARCHAR(20),
+ imsi VARCHAR(18),
+ ki VARCHAR(32) NOT NULL,
+ kid VARCHAR(20),
+ last_modified VARCHAR(100),
+ lpa VARCHAR(128),
+ misc1 VARCHAR(128),
+ misc2 VARCHAR(128),
+ misc3 VARCHAR(128),
+ misc4 VARCHAR(128),
+ opc VARCHAR(32) NOT NULL,
+ pin1 VARCHAR(20),
+ pin2 VARCHAR(20),
+ psk VARCHAR(128),
+ puk1 VARCHAR(20),
+ puk2 VARCHAR(20),
+ sim_vendor VARCHAR(20),
+ sqn BIGINT,
+ sqn_ind_bitlen INTEGER,
+ PRIMARY KEY (auc_id),
+ UNIQUE (iccid),
+ UNIQUE (imsi)
+);
+CREATE TABLE auth_sqn_ind (
+ client_name VARCHAR(255) NOT NULL,
+ ind_id INTEGER NOT NULL,
+ PRIMARY KEY (ind_id),
+ UNIQUE (client_name)
+);
+CREATE TABLE auth_sqn_seq (
+ auc_id INTEGER,
+ ind_id INTEGER,
+ seq BIGINT,
+ seq_id INTEGER NOT NULL,
+ PRIMARY KEY (seq_id),
+ UNIQUE (auc_id, ind_id),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(ind_id) REFERENCES auth_sqn_ind (ind_id)
+);
+CREATE TABLE charging_rule (
+ arp_preemption_capability BOOLEAN,
+ arp_preemption_vulnerability BOOLEAN,
+ arp_priority INTEGER,
+ charging_rule_id INTEGER NOT NULL,
+ gbr_dl INTEGER NOT NULL,
+ gbr_ul INTEGER NOT NULL,
+ last_modified VARCHAR(100),
+ mbr_dl INTEGER NOT NULL,
+ mbr_ul INTEGER NOT NULL,
+ precedence INTEGER,
+ qci INTEGER,
+ rating_group INTEGER,
+ rule_name VARCHAR(20),
+ tft_group_id INTEGER,
+ PRIMARY KEY (charging_rule_id)
+);
+CREATE TABLE database_schema_version (
+ comment VARCHAR(512),
+ date DATETIME DEFAULT CURRENT_TIMESTAMP,
+ upgrade_id INTEGER NOT NULL,
+ PRIMARY KEY (upgrade_id)
+);
+CREATE TABLE eir (
+ eir_id INTEGER NOT NULL,
+ imei VARCHAR(60),
+ imsi VARCHAR(60),
+ last_modified VARCHAR(100),
+ match_response_code INTEGER,
+ regex_mode INTEGER,
+ PRIMARY KEY (eir_id)
+);
+CREATE TABLE eir_history (
+ imsi_imei VARCHAR(60),
+ imsi_imei_history_id INTEGER NOT NULL,
+ imsi_imei_timestamp DATETIME,
+ last_modified VARCHAR(100),
+ match_response_code INTEGER,
+ PRIMARY KEY (imsi_imei_history_id),
+ UNIQUE (imsi_imei)
+);
+CREATE TABLE emergency_subscriber (
+ access_network_charging_address VARCHAR(512),
+ access_network_gateway_address VARCHAR(512),
+ emergency_subscriber_id INTEGER NOT NULL,
+ gx_origin_host VARCHAR(512),
+ gx_origin_realm VARCHAR(512),
+ imsi VARCHAR(18),
+ ip VARCHAR(512),
+ last_modified VARCHAR(100),
+ rat_type VARCHAR(512),
+ serving_pcscf VARCHAR(512),
+ serving_pcscf_timestamp VARCHAR(512),
+ serving_pgw VARCHAR(512),
+ serving_pgw_timestamp VARCHAR(512),
+ PRIMARY KEY (emergency_subscriber_id)
+);
+CREATE TABLE ims_subscriber (
+ ifc_path VARCHAR(512),
+ ims_subscriber_id INTEGER NOT NULL,
+ imsi VARCHAR(18),
+ last_modified VARCHAR(100),
+ msisdn VARCHAR(18),
+ msisdn_list VARCHAR(1200),
+ pcscf VARCHAR(512),
+ pcscf_active_session VARCHAR(512),
+ pcscf_peer VARCHAR(512),
+ pcscf_realm VARCHAR(512),
+ pcscf_timestamp DATETIME,
+ scscf VARCHAR(512),
+ scscf_peer VARCHAR(512),
+ scscf_realm VARCHAR(512),
+ scscf_timestamp DATETIME,
+ sh_profile TEXT,
+ sh_template_path VARCHAR(512),
+ xcap_profile TEXT,
+ PRIMARY KEY (ims_subscriber_id),
+ UNIQUE (msisdn)
+);
+CREATE TABLE operation_log (
+ apn_id INTEGER,
+ auc_id INTEGER,
+ changes TEXT,
+ charging_rule_id INTEGER,
+ eir_id INTEGER,
+ emergency_subscriber_id INTEGER,
+ id INTEGER NOT NULL,
+ ims_subscriber_id INTEGER,
+ imsi_imei_history_id INTEGER,
+ item_id INTEGER NOT NULL,
+ last_modified VARCHAR(100),
+ operation VARCHAR(10),
+ operation_id VARCHAR(36) NOT NULL,
+ roaming_network_id INTEGER,
+ roaming_rule_id INTEGER,
+ serving_apn_id INTEGER,
+ subscriber_attributes_id INTEGER,
+ subscriber_id INTEGER,
+ subscriber_routing_id INTEGER,
+ table_name VARCHAR(255),
+ tft_id INTEGER,
+ timestamp DATETIME,
+ PRIMARY KEY (id),
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id),
+ FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id),
+ FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id),
+ FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id),
+ FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id),
+ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id),
+ FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id),
+ FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id),
+ FOREIGN KEY(tft_id) REFERENCES tft (tft_id),
+ FOREIGN KEY(eir_id) REFERENCES eir (eir_id),
+ FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id),
+ FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id)
+);
+CREATE TABLE roaming_network (
+ last_modified VARCHAR(100),
+ mcc VARCHAR(100),
+ mnc VARCHAR(100),
+ name VARCHAR(512),
+ preference INTEGER,
+ roaming_network_id INTEGER NOT NULL,
+ PRIMARY KEY (roaming_network_id)
+);
+CREATE TABLE roaming_rule (
+ allow BOOLEAN,
+ enabled BOOLEAN,
+ last_modified VARCHAR(100),
+ roaming_network_id INTEGER,
+ roaming_rule_id INTEGER NOT NULL,
+ PRIMARY KEY (roaming_rule_id),
+ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id) ON DELETE CASCADE
+);
+CREATE TABLE serving_apn (
+ apn INTEGER,
+ ip_version INTEGER,
+ last_modified VARCHAR(100),
+ pcrf_session_id VARCHAR(100),
+ serving_apn_id INTEGER NOT NULL,
+ serving_pgw VARCHAR(512),
+ serving_pgw_peer VARCHAR(512),
+ serving_pgw_realm VARCHAR(512),
+ serving_pgw_timestamp DATETIME,
+ subscriber_id INTEGER,
+ subscriber_routing VARCHAR(100),
+ PRIMARY KEY (serving_apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber (
+ apn_list VARCHAR(64) NOT NULL,
+ auc_id INTEGER NOT NULL,
+ default_apn INTEGER NOT NULL,
+ enabled BOOLEAN,
+ imsi VARCHAR(18),
+ last_location_update_timestamp DATETIME,
+ last_modified VARCHAR(100),
+ last_seen_cell_id VARCHAR(64),
+ last_seen_eci VARCHAR(64),
+ last_seen_enodeb_id VARCHAR(64),
+ last_seen_mcc VARCHAR(3),
+ last_seen_mnc VARCHAR(3),
+ last_seen_tac VARCHAR(64),
+ msisdn VARCHAR(18),
+ nam INTEGER,
+ roaming_enabled BOOLEAN,
+ roaming_rule_list VARCHAR(512),
+ serving_mme VARCHAR(512),
+ serving_mme_peer VARCHAR(512),
+ serving_mme_realm VARCHAR(512),
+ serving_mme_timestamp DATETIME,
+ serving_msc VARCHAR(512),
+ serving_msc_timestamp DATETIME,
+ serving_sgsn VARCHAR(512),
+ serving_sgsn_timestamp DATETIME,
+ serving_vlr VARCHAR(512),
+ serving_vlr_timestamp DATETIME,
+ subscribed_rau_tau_timer INTEGER,
+ subscriber_id INTEGER NOT NULL,
+ ue_ambr_dl INTEGER,
+ ue_ambr_ul INTEGER,
+ PRIMARY KEY (subscriber_id),
+ UNIQUE (imsi),
+ FOREIGN KEY(auc_id) REFERENCES auc (auc_id),
+ FOREIGN KEY(default_apn) REFERENCES apn (apn_id)
+);
+CREATE TABLE subscriber_attributes (
+ "key" VARCHAR(60),
+ last_modified VARCHAR(100),
+ subscriber_attributes_id INTEGER NOT NULL,
+ subscriber_id INTEGER NOT NULL,
+ value VARCHAR(12000),
+ PRIMARY KEY (subscriber_attributes_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE
+);
+CREATE TABLE subscriber_routing (
+ apn_id INTEGER,
+ ip_address VARCHAR(254),
+ ip_version INTEGER,
+ last_modified VARCHAR(100),
+ subscriber_id INTEGER,
+ subscriber_routing_id INTEGER NOT NULL,
+ PRIMARY KEY (subscriber_routing_id),
+ UNIQUE (subscriber_id, apn_id),
+ FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE,
+ FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE
+);
+CREATE TABLE tft (
+ direction INTEGER NOT NULL,
+ last_modified VARCHAR(100),
+ tft_group_id INTEGER NOT NULL,
+ tft_id INTEGER NOT NULL,
+ tft_string VARCHAR(100) NOT NULL,
+ PRIMARY KEY (tft_id)
+);
+COMMIT;
diff --git a/tests/fixtures.py b/tests/fixtures.py
index e11b48dc..0d838f26 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -48,7 +48,7 @@ def create_test_db():
print(f"Removing previous test DB: {test_db}")
os.unlink(test_db)
- db = Database(LogTool(config))
+ db = Database(LogTool(config), main_service=True)
assert os.path.exists(test_db)
db.CreateObj(APN, {
diff --git a/tests/test_API.py b/tests/test_API.py
index 412c0ba6..a83c377b 100644
--- a/tests/test_API.py
+++ b/tests/test_API.py
@@ -96,6 +96,7 @@ class AUC_Tests(unittest.TestCase):
"opc": '44d51018f65affc04e6d56d699df3a76',
"amf": "8000",
"sqn": 99,
+ "sqn_ind_bitlen": None,
'batch_name': None,
'esim': False,
'iccid': None,
diff --git a/tests/test_database.py b/tests/test_database.py
index 742d6c05..1fd153bd 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -123,11 +123,9 @@ def test_database(create_test_db):
# Generate Vectors
print("Generating Vectors")
- database.Get_Vectors_AuC(auc_id, "air", plmn="12ff")
- print(database.Get_Vectors_AuC(auc_id, "sip_auth", plmn="12ff"))
-
- # Update AuC
- database.Update_AuC(auc_id, sqn=100)
+ ind = database.Get_AUTH_SQN_IND("test:test_database.py")
+ print(database.Get_Vectors_AuC_air(auc_id, "12ff", ind))
+ print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff", ind))
# New Subscriber
subscriber_json = {
@@ -225,10 +223,11 @@ def test_database(create_test_db):
# Generate Vectors for IMS Subscriber
print("Generating Vectors for IMS Subscriber")
- print(database.Get_Vectors_AuC(auc_id, "sip_auth", plmn="12ff"))
+ ind = database.Get_AUTH_SQN_IND("test:test_database.py")
+ print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff", ind))
# print("Generating Resync for IMS Subscriber")
- # print(Get_Vectors_AuC(auc_id, "sqn_resync", auts='7964347dfdfe432289522183fcfb', rand='1bc9f096002d3716c65e4e1f4c1c0d17'))
+ # print(Get_Vectors_AuC_sqn_resync(auc_id, '7964347dfdfe432289522183fcfb', '1bc9f096002d3716c65e4e1f4c1c0d17'))
# Test getting APNs
GetAPN_Result = database.Get_APN(GetSubscriber_Result["default_apn"])
diff --git a/tests/test_database_upgrade.py b/tests/test_database_upgrade.py
new file mode 100644
index 00000000..14ac3c23
--- /dev/null
+++ b/tests/test_database_upgrade.py
@@ -0,0 +1,141 @@
+# Copyright 2025 sysmocom - s.f.m.c. GmbH
+# SPDX-License-Identifier: AGPL-3.0-or-later
+import sqlite3
+import os
+import glob
+import re
+import pytest
+from pathlib import Path
+from database import Database
+from logtool import LogTool
+from pyhss_config import config
+
+top_dir = Path(Path(__file__) / "../..").resolve()
+test_db = os.path.join(top_dir, "tests/.pyhss_test_database_upgrade.db")
+
+
+def create_table_with_sorted_columns(lines):
+ assert len(lines) > 2
+ assert lines[-1] == ");"
+
+ start = lines[0]
+ end = lines[-1]
+
+ columns_and_keys = " ".join(lines[1:-1])
+
+    # Replace commas inside parentheses first
+ marker = "|"
+ assert marker not in columns_and_keys
+ for p in re.findall(r"\(.*?\)", columns_and_keys):
+ columns_and_keys = columns_and_keys.replace(p, p.replace(",", marker))
+
+ lines = columns_and_keys.split(",")
+ columns = []
+ keys = []
+
+ for line in lines:
+ line = line.replace(marker, ",").strip() + ","
+ word = line.split(" ")[0]
+ if word in ["UNIQUE", "FOREIGN", "PRIMARY"]:
+ keys += [f"\t{line}"]
+ else:
+ columns += [f"\t{line}"]
+
+ ret = [start]
+ ret += sorted(columns)
+ ret += keys
+ ret[-1] = ret[-1].rstrip(",")
+ ret += [end]
+ return ret
+
+
+def dump_sql(tmpdir):
+ conn = sqlite3.connect(test_db)
+
+ ret_sql = ""
+ for cmd in conn.iterdump():
+ lines = cmd.split("\n")
+
+ if cmd.startswith("INSERT INTO "):
+ continue
+
+ if cmd.startswith("CREATE TABLE "):
+ lines = create_table_with_sorted_columns(lines)
+
+ for line in lines:
+ ret_sql += f"{line.rstrip()}\n"
+
+ conn.close()
+
+ ret_path = os.path.join(tmpdir, "current_db.sql")
+ with open(ret_path, "w") as f:
+ f.write(ret_sql)
+
+ return ret_sql, ret_path
+
+
+def compare_with_latest_sql(tmpdir):
+ latest_path = os.path.join(top_dir, "tests/db_schema/latest.sql")
+ with open(latest_path) as f:
+ latest_sql = f.read()
+
+ current_sql, current_path = dump_sql(tmpdir)
+
+ assert current_sql == latest_sql, f"compare_with_latest_sql failed, {current_path} vs. {latest_path}"
+
+
+def test_new_db(tmpdir, monkeypatch):
+ if os.path.exists(test_db):
+ os.unlink(test_db)
+
+ monkeypatch.setitem(config["database"], "database", test_db)
+ db = Database(LogTool(config), main_service=True)
+ db.engine.dispose()
+
+ compare_with_latest_sql(tmpdir)
+
+
+def test_old_versions(tmpdir, monkeypatch):
+ monkeypatch.setitem(config["database"], "database", test_db)
+
+ pattern = os.path.join(top_dir, "tests/db_schema/*.sql")
+ for sql in glob.glob(pattern):
+ if sql.endswith("/20231009_release_1.0.0.sql"):
+ # See test_unsupported_1_0_0() below
+ continue
+ if os.path.exists(test_db):
+ os.unlink(test_db)
+
+ print(f"Testing {sql}")
+
+ # Create database from the SQL file
+ conn = sqlite3.connect(test_db)
+ with open(sql) as f:
+ sql_script = f.read()
+ conn.executescript(sql_script)
+ conn.close()
+
+ # Upgrade the database
+ db = Database(LogTool(config), main_service=True)
+ db.engine.dispose()
+
+ # Compare
+ compare_with_latest_sql(tmpdir)
+
+
+def test_unsupported_1_0_0(tmpdir, monkeypatch):
+ monkeypatch.setitem(config["database"], "database", test_db)
+
+ if os.path.exists(test_db):
+ os.unlink(test_db)
+
+ conn = sqlite3.connect(test_db)
+ sql = os.path.join(top_dir, "tests/db_schema/20231009_release_1.0.0.sql")
+ with open(sql) as f:
+ sql_script = f.read()
+ conn.executescript(sql_script)
+ conn.close()
+
+ with pytest.raises(SystemExit) as e:
+ Database(LogTool(config), main_service=True)
+ assert e.value.code == 20
diff --git a/tests/test_gsup_air.py b/tests/test_gsup_air.py
index c5996500..b7dc8b44 100644
--- a/tests/test_gsup_air.py
+++ b/tests/test_gsup_air.py
@@ -50,6 +50,7 @@ def connect(self):
# Send the identity response to the server
data = self.ipa.tag_unit(self.identity.encode('utf-8'))
+ data = data + self.ipa.tag_serial("TEST-00-00-00-00-00-00".encode('utf-8'))
data = self.ipa.id_resp(data)
self.sock.send(data)