From 876904c92e97cd06cad73cc64ffb96c1e632525c Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 11 Dec 2025 09:20:52 +0100 Subject: [PATCH 01/16] services: add #!/usr/bin/env python3 Add the hashbang line to each service script and make them executable, so these can be executed directly from the terminal. --- services/apiService.py | 1 + services/databaseService.py | 1 + services/diameterService.py | 1 + services/georedService.py | 1 + services/gsupService.py | 1 + services/hssService.py | 1 + services/logService.py | 1 + services/metricService.py | 1 + 8 files changed, 8 insertions(+) mode change 100644 => 100755 services/databaseService.py mode change 100644 => 100755 services/gsupService.py diff --git a/services/apiService.py b/services/apiService.py index 599b3dbb..a2ab4cb3 100755 --- a/services/apiService.py +++ b/services/apiService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2022-2025 Nick # Copyright 2023-2025 David Kneipp # Copyright 2025 sysmocom - s.f.m.c. GmbH diff --git a/services/databaseService.py b/services/databaseService.py old mode 100644 new mode 100755 index 94bb4e2c..2527ea08 --- a/services/databaseService.py +++ b/services/databaseService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2024 David Kneipp # Copyright 2025 Victor Seva # Copyright 2025 sysmocom - s.f.m.c. GmbH diff --git a/services/diameterService.py b/services/diameterService.py index 2ba47d1a..90795e26 100755 --- a/services/diameterService.py +++ b/services/diameterService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2023-2024 David Kneipp # Copyright 2025 sysmocom - s.f.m.c. 
GmbH # SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/services/georedService.py b/services/georedService.py index 8ee4ce15..35713796 100755 --- a/services/georedService.py +++ b/services/georedService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2019-2021 Nick # Copyright 2023-2025 David Kneipp # SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/services/gsupService.py b/services/gsupService.py old mode 100644 new mode 100755 index aa64ac61..76b3fe83 --- a/services/gsupService.py +++ b/services/gsupService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # PyHSS GSUP Service # Copyright 2025 Lennart Rosam # Copyright 2025 Alexander Couzens diff --git a/services/hssService.py b/services/hssService.py index c5db43d1..7e65e5e1 100755 --- a/services/hssService.py +++ b/services/hssService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2023-2025 David Kneipp # SPDX-License-Identifier: AGPL-3.0-or-later import os, sys, json, time, traceback, socket diff --git a/services/logService.py b/services/logService.py index e01da65a..cd682120 100755 --- a/services/logService.py +++ b/services/logService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2023-2024 David Kneipp # SPDX-License-Identifier: AGPL-3.0-or-later import os, sys, json, socket diff --git a/services/metricService.py b/services/metricService.py index 64112b11..af6a316a 100755 --- a/services/metricService.py +++ b/services/metricService.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2023-2024 David Kneipp # SPDX-License-Identifier: AGPL-3.0-or-later import asyncio From 531db8fb00c82f5642d3656efb376ece20997214 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 12 Dec 2025 09:15:19 +0100 Subject: [PATCH 02/16] pyproject.toml: add ruff options Configure ruff, the fast Python linter and code formatter, so developers can run "ruff check" and "ruff format --diff" in a pre-commit git hook to catch some errors early and to have consistent formatting. 
Set a line length that seems appropriate for pyhss and include the files that already pass all checks. --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 594c6544..157dc221 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,3 +41,11 @@ filterwarnings = [ # https://github.com/python/cpython/issues/135834 "ignore:The default datetime adapter is deprecated as of Python 3.12" ] + +[tool.ruff] +line-length = 120 +include = [ + "lib/pyhss_config.py", + "tests/test_license_headers.py", + "tests/test_milenage.py", +] From 435e596d8d7d83a7dfe80ad54ed76fd801eae691 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 11 Dec 2025 08:50:43 +0100 Subject: [PATCH 03/16] DB: let only the main service create/upgrade it Before this patch we have multiple services trying to create the database and trying to upgrade the schema at the same time. (The logic for upgrading is currently just creating missing tables, I plan to improve this with future patches.) Instead of doing that, let only the main service, pyhss_hss, do this and let all other services wait until the database schema is ready. 
--- lib/database.py | 24 ++---------- lib/databaseSchema.py | 86 ++++++++++++++++++++++++++++++++++++++++++ lib/diameter.py | 14 ++++++- pyproject.toml | 1 + services/hssService.py | 10 ++++- tests/fixtures.py | 2 +- 6 files changed, 113 insertions(+), 24 deletions(-) create mode 100644 lib/databaseSchema.py diff --git a/lib/database.py b/lib/database.py index f810e2d7..599d501c 100755 --- a/lib/database.py +++ b/lib/database.py @@ -6,10 +6,8 @@ from typing import Optional from sqlalchemy import Column, Integer, String, MetaData, Table, Boolean, ForeignKey, select, UniqueConstraint, DateTime, BigInteger, Text, DateTime, Float -from sqlalchemy import create_engine, inspect -from sqlalchemy.engine.reflection import Inspector +from sqlalchemy import create_engine from sqlalchemy.sql import desc, func -from sqlalchemy_utils import database_exists, create_database from sqlalchemy.orm import sessionmaker, relationship, Session, class_mapper from sqlalchemy.orm.attributes import History, get_history from sqlalchemy.orm import declarative_base @@ -22,6 +20,7 @@ import socket import pprint import S6a_crypt +from databaseSchema import DatabaseSchema from baseModels import SubscriberInfo, LocationInfo2G from gsup.protocol.ipa_peer import IPAPeerRole from messaging import RedisMessaging @@ -356,7 +355,7 @@ class SUBSCRIBER_ATTRIBUTES_OPERATION_LOG(OPERATION_LOG_BASE): class Database: - def __init__(self, logTool, redisMessaging=None): + def __init__(self, logTool, redisMessaging=None, main_service: bool = False): self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') @@ -395,13 +394,7 @@ def __init__(self, logTool, redisMessaging=None): pool_size=config['logging'].get('sqlalchemy_pool_size', 30), max_overflow=config['logging'].get('sqlalchemy_max_overflow', 0)) - # Create database if it does not exist. 
- if not database_exists(self.engine.url): - self.logTool.log(service='Database', level='debug', message="Creating database", redisClient=self.redisMessaging) - create_database(self.engine.url) - Base.metadata.create_all(self.engine) - else: - self.logTool.log(service='Database', level='debug', message="Database already created", redisClient=self.redisMessaging) + DatabaseSchema(self.logTool, Base, self.engine, main_service) #Load IMEI TAC database into Redis if enabled if self.tacDatabasePath: @@ -411,15 +404,6 @@ def __init__(self, logTool, redisMessaging=None): self.logTool.log(service='Database', level='info', message="Not loading EIR IMEI TAC Database as Redis not enabled or TAC CSV Database not set in config", redisClient=self.redisMessaging) self.tacData = {} - # Create individual tables if they do not exist - inspector = inspect(self.engine) - for table_name in Base.metadata.tables.keys(): - if table_name not in inspector.get_table_names(): - self.logTool.log(service='Database', level='debug', message=f"Creating table {table_name}", redisClient=self.redisMessaging) - Base.metadata.tables[table_name].create(bind=self.engine) - else: - self.logTool.log(service='Database', level='debug', message=f"Table {table_name} already exists", redisClient=self.redisMessaging) - def load_IMEI_database_into_Redis(self): try: self.logTool.log(service='Database', level='info', message=f"Reading IMEI TAC database CSV from: {self.tacDatabasePath}", redisClient=self.redisMessaging) diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py new file mode 100644 index 00000000..7a8a0472 --- /dev/null +++ b/lib/databaseSchema.py @@ -0,0 +1,86 @@ +# Copyright 2025 sysmocom - s.f.m.c. 
GmbH +# SPDX-License-Identifier: AGPL-3.0-or-later +import sqlalchemy +import sys +import time +from sqlalchemy.engine import Engine +from sqlalchemy_utils import database_exists, create_database + + +class DatabaseSchema: + latest = 0 + + def __init__(self, logTool, base, engine: Engine, main_service: bool): + self.logTool = logTool + self.base = base + self.engine = engine + + if not self.is_ready(): + if main_service: + self.init_db() + self.init_tables() + else: + self.wait_until_ready() + + def get_version(self): + # Future patches will store the current schema version inside the db + return 0 + + def is_ready(self): + if not database_exists(self.engine.url): + return False + return self.get_version() == self.latest + + def wait_until_ready(self): + self.logTool.log( + service="Database", + level="info", + message="Waiting for the main service to prepare the database", + ) + + for i in range(100): + time.sleep(0.2) + if self.is_ready(): + return + + self.logTool.log( + service="Database", + level="error", + message="Database did not get ready. 
Is pyhss_hss (hssService) running?", + ) + sys.exit(10) + + def init_db(self): + # Create database if it does not exist + if not database_exists(self.engine.url): + self.logTool.log( + service="Database", + level="debug", + message="Creating database", + ) + create_database(self.engine.url) + self.base.metadata.create_all(self.engine) + else: + self.logTool.log( + service="Database", + level="debug", + message="Database already created", + ) + + def init_tables(self): + # Create individual tables if they do not exist + inspector = sqlalchemy.inspect(self.engine) + for table_name in self.base.metadata.tables.keys(): + if table_name not in inspector.get_table_names(): + self.logTool.log( + service="Database", + level="debug", + message=f"Creating table {table_name}", + ) + self.base.metadata.tables[table_name].create(bind=self.engine) + else: + self.logTool.log( + service="Database", + level="debug", + message=f"Table {table_name} already exists", + ) diff --git a/lib/diameter.py b/lib/diameter.py index f95ab3e0..e220d03d 100755 --- a/lib/diameter.py +++ b/lib/diameter.py @@ -29,7 +29,17 @@ class Diameter: - def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc999.mcc999.3gppnetwork.org", productName: str="PyHSS", mcc: str="999", mnc: str="999", redisMessaging=None): + def __init__( + self, + logTool, + originHost: str = "hss01", + originRealm: str = "epc.mnc999.mcc999.3gppnetwork.org", + productName: str = "PyHSS", + mcc: str = "999", + mnc: str = "999", + redisMessaging=None, + main_service: bool = False, + ): self.OriginHost = self.string_to_hex(originHost) self.OriginRealm = self.string_to_hex(originRealm) self.ProductName = self.string_to_hex(productName) @@ -49,7 +59,7 @@ def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc99 self.hostname = socket.gethostname() - self.database = Database(logTool=logTool) + self.database = Database(logTool=logTool, main_service=main_service) self.diameterRequestTimeout = 
int(config.get('hss', {}).get('diameter_request_timeout', 10)) self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') self.useDraFallback = config.get('hss', {}).get('use_dra_fallback', False) diff --git a/pyproject.toml b/pyproject.toml index 157dc221..e9b9bf60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ filterwarnings = [ [tool.ruff] line-length = 120 include = [ + "lib/databaseSchema.py", "lib/pyhss_config.py", "tests/test_license_headers.py", "tests/test_milenage.py", diff --git a/services/hssService.py b/services/hssService.py index 7e65e5e1..2f95279f 100755 --- a/services/hssService.py +++ b/services/hssService.py @@ -30,7 +30,15 @@ def __init__(self): self.originHost = config.get('hss', {}).get('OriginHost', f'hss01') self.productName = config.get('hss', {}).get('ProductName', f'PyHSS') self.logTool.log(service='HSS', level='info', message=f"{self.banners.hssService()}", redisClient=self.redisMessaging) - self.diameterLibrary = Diameter(logTool=self.logTool, originHost=self.originHost, originRealm=self.originRealm, productName=self.productName, mcc=self.mcc, mnc=self.mnc) + self.diameterLibrary = Diameter( + logTool=self.logTool, + originHost=self.originHost, + originRealm=self.originRealm, + productName=self.productName, + mcc=self.mcc, + mnc=self.mnc, + main_service=True, + ) self.benchmarking = config.get('hss').get('enable_benchmarking', False) self.hostname = self.originHost self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') diff --git a/tests/fixtures.py b/tests/fixtures.py index e11b48dc..0d838f26 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -48,7 +48,7 @@ def create_test_db(): print(f"Removing previous test DB: {test_db}") os.unlink(test_db) - db = Database(LogTool(config)) + db = Database(LogTool(config), main_service=True) assert os.path.exists(test_db) db.CreateObj(APN, { From 8d274cfee72484f866d4f04f868f4bb4366fb325 Mon Sep 17 00:00:00 2001 
From: Oliver Smith Date: Wed, 10 Dec 2025 17:29:19 +0100 Subject: [PATCH 04/16] tests: add test_database_upgrade --- .gitignore | 2 +- pyproject.toml | 1 + tests/db_schema/latest.sql | 267 +++++++++++++++++++++++++++++++++ tests/test_database_upgrade.py | 119 +++++++++++++++ 4 files changed, 388 insertions(+), 1 deletion(-) create mode 100644 tests/db_schema/latest.sql create mode 100644 tests/test_database_upgrade.py diff --git a/.gitignore b/.gitignore index 919c218a..bfcadab3 100755 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,6 @@ lib/__pycache__/milenage.cpython-36.pyc pyhss.egg-info !tests/config.yaml -tests/.pyhss.db +tests/.pyhss*.db .venv hss.db diff --git a/pyproject.toml b/pyproject.toml index e9b9bf60..25490056 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ line-length = 120 include = [ "lib/databaseSchema.py", "lib/pyhss_config.py", + "tests/test_database_upgrade.py", "tests/test_license_headers.py", "tests/test_milenage.py", ] diff --git a/tests/db_schema/latest.sql b/tests/db_schema/latest.sql new file mode 100644 index 00000000..5d871269 --- /dev/null +++ b/tests/db_schema/latest.sql @@ -0,0 +1,267 @@ +BEGIN TRANSACTION; +CREATE TABLE apn ( + apn VARCHAR(50) NOT NULL, + apn_ambr_dl INTEGER NOT NULL, + apn_ambr_ul INTEGER NOT NULL, + apn_id INTEGER NOT NULL, + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + arp_priority INTEGER, + charging_characteristics VARCHAR(4), + charging_rule_list VARCHAR(18), + ip_version INTEGER, + last_modified VARCHAR(100), + nbiot BOOLEAN, + nidd_mechanism INTEGER, + nidd_preferred_data_mode INTEGER, + nidd_rds INTEGER, + nidd_scef_id VARCHAR(512), + nidd_scef_realm VARCHAR(512), + pgw_address VARCHAR(50), + qci INTEGER, + sgw_address VARCHAR(50), + PRIMARY KEY (apn_id) +); +CREATE TABLE auc ( + adm1 VARCHAR(20), + algo VARCHAR(20), + amf VARCHAR(4) NOT NULL, + auc_id INTEGER NOT NULL, + batch_name VARCHAR(20), + des VARCHAR(128), + esim BOOLEAN, + iccid VARCHAR(20), + 
imsi VARCHAR(18), + ki VARCHAR(32) NOT NULL, + kid VARCHAR(20), + last_modified VARCHAR(100), + lpa VARCHAR(128), + misc1 VARCHAR(128), + misc2 VARCHAR(128), + misc3 VARCHAR(128), + misc4 VARCHAR(128), + opc VARCHAR(32) NOT NULL, + pin1 VARCHAR(20), + pin2 VARCHAR(20), + psk VARCHAR(128), + puk1 VARCHAR(20), + puk2 VARCHAR(20), + sim_vendor VARCHAR(20), + sqn BIGINT, + PRIMARY KEY (auc_id), + UNIQUE (iccid), + UNIQUE (imsi) +); +CREATE TABLE charging_rule ( + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + arp_priority INTEGER, + charging_rule_id INTEGER NOT NULL, + gbr_dl INTEGER NOT NULL, + gbr_ul INTEGER NOT NULL, + last_modified VARCHAR(100), + mbr_dl INTEGER NOT NULL, + mbr_ul INTEGER NOT NULL, + precedence INTEGER, + qci INTEGER, + rating_group INTEGER, + rule_name VARCHAR(20), + tft_group_id INTEGER, + PRIMARY KEY (charging_rule_id) +); +CREATE TABLE eir ( + eir_id INTEGER NOT NULL, + imei VARCHAR(60), + imsi VARCHAR(60), + last_modified VARCHAR(100), + match_response_code INTEGER, + regex_mode INTEGER, + PRIMARY KEY (eir_id) +); +CREATE TABLE eir_history ( + imsi_imei VARCHAR(60), + imsi_imei_history_id INTEGER NOT NULL, + imsi_imei_timestamp DATETIME, + last_modified VARCHAR(100), + match_response_code INTEGER, + PRIMARY KEY (imsi_imei_history_id), + UNIQUE (imsi_imei) +); +CREATE TABLE emergency_subscriber ( + access_network_charging_address VARCHAR(512), + access_network_gateway_address VARCHAR(512), + emergency_subscriber_id INTEGER NOT NULL, + gx_origin_host VARCHAR(512), + gx_origin_realm VARCHAR(512), + imsi VARCHAR(18), + ip VARCHAR(512), + last_modified VARCHAR(100), + rat_type VARCHAR(512), + serving_pcscf VARCHAR(512), + serving_pcscf_timestamp VARCHAR(512), + serving_pgw VARCHAR(512), + serving_pgw_timestamp VARCHAR(512), + PRIMARY KEY (emergency_subscriber_id) +); +CREATE TABLE ims_subscriber ( + ifc_path VARCHAR(512), + ims_subscriber_id INTEGER NOT NULL, + imsi VARCHAR(18), + last_modified VARCHAR(100), + msisdn 
VARCHAR(18), + msisdn_list VARCHAR(1200), + pcscf VARCHAR(512), + pcscf_active_session VARCHAR(512), + pcscf_peer VARCHAR(512), + pcscf_realm VARCHAR(512), + pcscf_timestamp DATETIME, + scscf VARCHAR(512), + scscf_peer VARCHAR(512), + scscf_realm VARCHAR(512), + scscf_timestamp DATETIME, + sh_profile TEXT, + sh_template_path VARCHAR(512), + xcap_profile TEXT, + PRIMARY KEY (ims_subscriber_id), + UNIQUE (msisdn) +); +CREATE TABLE operation_log ( + apn_id INTEGER, + auc_id INTEGER, + changes TEXT, + charging_rule_id INTEGER, + eir_id INTEGER, + emergency_subscriber_id INTEGER, + id INTEGER NOT NULL, + ims_subscriber_id INTEGER, + imsi_imei_history_id INTEGER, + item_id INTEGER NOT NULL, + last_modified VARCHAR(100), + operation VARCHAR(10), + operation_id VARCHAR(36) NOT NULL, + roaming_network_id INTEGER, + roaming_rule_id INTEGER, + serving_apn_id INTEGER, + subscriber_attributes_id INTEGER, + subscriber_id INTEGER, + subscriber_routing_id INTEGER, + table_name VARCHAR(255), + tft_id INTEGER, + timestamp DATETIME, + PRIMARY KEY (id), + FOREIGN KEY(apn_id) REFERENCES apn (apn_id), + FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id), + FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id), + FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id), + FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id), + FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id), + FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id), + FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id), + FOREIGN KEY(tft_id) REFERENCES tft (tft_id), + FOREIGN KEY(eir_id) REFERENCES eir (eir_id), + FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id), + FOREIGN 
KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id) +); +CREATE TABLE roaming_network ( + last_modified VARCHAR(100), + mcc VARCHAR(100), + mnc VARCHAR(100), + name VARCHAR(512), + preference INTEGER, + roaming_network_id INTEGER NOT NULL, + PRIMARY KEY (roaming_network_id) +); +CREATE TABLE roaming_rule ( + allow BOOLEAN, + enabled BOOLEAN, + last_modified VARCHAR(100), + roaming_network_id INTEGER, + roaming_rule_id INTEGER NOT NULL, + PRIMARY KEY (roaming_rule_id), + FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id) ON DELETE CASCADE +); +CREATE TABLE serving_apn ( + apn INTEGER, + ip_version INTEGER, + last_modified VARCHAR(100), + pcrf_session_id VARCHAR(100), + serving_apn_id INTEGER NOT NULL, + serving_pgw VARCHAR(512), + serving_pgw_peer VARCHAR(512), + serving_pgw_realm VARCHAR(512), + serving_pgw_timestamp DATETIME, + subscriber_id INTEGER, + subscriber_routing VARCHAR(100), + PRIMARY KEY (serving_apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE subscriber ( + apn_list VARCHAR(64) NOT NULL, + auc_id INTEGER NOT NULL, + default_apn INTEGER NOT NULL, + enabled BOOLEAN, + imsi VARCHAR(18), + last_location_update_timestamp DATETIME, + last_modified VARCHAR(100), + last_seen_cell_id VARCHAR(64), + last_seen_eci VARCHAR(64), + last_seen_enodeb_id VARCHAR(64), + last_seen_mcc VARCHAR(3), + last_seen_mnc VARCHAR(3), + last_seen_tac VARCHAR(64), + msisdn VARCHAR(18), + nam INTEGER, + roaming_enabled BOOLEAN, + roaming_rule_list VARCHAR(512), + serving_mme VARCHAR(512), + serving_mme_peer VARCHAR(512), + serving_mme_realm VARCHAR(512), + serving_mme_timestamp DATETIME, + serving_msc VARCHAR(512), + serving_msc_timestamp DATETIME, + serving_sgsn VARCHAR(512), + serving_sgsn_timestamp DATETIME, + serving_vlr VARCHAR(512), + serving_vlr_timestamp DATETIME, + 
subscribed_rau_tau_timer INTEGER, + subscriber_id INTEGER NOT NULL, + ue_ambr_dl INTEGER, + ue_ambr_ul INTEGER, + PRIMARY KEY (subscriber_id), + UNIQUE (imsi), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(default_apn) REFERENCES apn (apn_id) +); +CREATE TABLE subscriber_attributes ( + "key" VARCHAR(60), + last_modified VARCHAR(100), + subscriber_attributes_id INTEGER NOT NULL, + subscriber_id INTEGER NOT NULL, + value VARCHAR(12000), + PRIMARY KEY (subscriber_attributes_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE +); +CREATE TABLE subscriber_routing ( + apn_id INTEGER, + ip_address VARCHAR(254), + ip_version INTEGER, + last_modified VARCHAR(100), + subscriber_id INTEGER, + subscriber_routing_id INTEGER NOT NULL, + PRIMARY KEY (subscriber_routing_id), + UNIQUE (subscriber_id, apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE tft ( + direction INTEGER NOT NULL, + last_modified VARCHAR(100), + tft_group_id INTEGER NOT NULL, + tft_id INTEGER NOT NULL, + tft_string VARCHAR(100) NOT NULL, + PRIMARY KEY (tft_id) +); +COMMIT; diff --git a/tests/test_database_upgrade.py b/tests/test_database_upgrade.py new file mode 100644 index 00000000..6c235d4b --- /dev/null +++ b/tests/test_database_upgrade.py @@ -0,0 +1,119 @@ +# Copyright 2025 sysmocom - s.f.m.c. 
GmbH +# SPDX-License-Identifier: AGPL-3.0-or-later +import sqlite3 +import os +import glob +import re +from pathlib import Path +from database import Database +from logtool import LogTool +from pyhss_config import config + +top_dir = Path(Path(__file__) / "../..").resolve() +test_db = os.path.join(top_dir, "tests/.pyhss_test_database_upgrade.db") + + +def create_table_with_sorted_columns(lines): + assert len(lines) > 2 + assert lines[-1] == ");" + + start = lines[0] + end = lines[-1] + + columns_and_keys = " ".join(lines[1:-1]) + + # Replace commas inside paranethesis first + marker = "|" + assert marker not in columns_and_keys + for p in re.findall(r"\(.*?\)", columns_and_keys): + columns_and_keys = columns_and_keys.replace(p, p.replace(",", marker)) + + lines = columns_and_keys.split(",") + columns = [] + keys = [] + + for line in lines: + line = line.replace(marker, ",").strip() + "," + word = line.split(" ")[0] + if word in ["UNIQUE", "FOREIGN", "PRIMARY"]: + keys += [f"\t{line}"] + else: + columns += [f"\t{line}"] + + ret = [start] + ret += sorted(columns) + ret += keys + ret[-1] = ret[-1].rstrip(",") + ret += [end] + return ret + + +def dump_sql(tmpdir): + conn = sqlite3.connect(test_db) + + ret_sql = "" + for cmd in conn.iterdump(): + lines = cmd.split("\n") + + if cmd.startswith("INSERT INTO "): + continue + + if cmd.startswith("CREATE TABLE "): + lines = create_table_with_sorted_columns(lines) + + for line in lines: + ret_sql += f"{line.rstrip()}\n" + + conn.close() + + ret_path = os.path.join(tmpdir, "current_db.sql") + with open(ret_path, "w") as f: + f.write(ret_sql) + + return ret_sql, ret_path + + +def compare_with_latest_sql(tmpdir): + latest_path = os.path.join(top_dir, "tests/db_schema/latest.sql") + with open(latest_path) as f: + latest_sql = f.read() + + current_sql, current_path = dump_sql(tmpdir) + + assert current_sql == latest_sql, f"compare_with_latest_sql failed, {current_path} vs. 
{latest_path}" + + +def test_new_db(tmpdir, monkeypatch): + if os.path.exists(test_db): + os.unlink(test_db) + + monkeypatch.setitem(config["database"], "database", test_db) + db = Database(LogTool(config), main_service=True) + db.engine.dispose() + + compare_with_latest_sql(tmpdir) + + +def test_old_versions(tmpdir, monkeypatch): + monkeypatch.setitem(config["database"], "database", test_db) + + pattern = os.path.join(top_dir, "tests/db_schema/*.sql") + for sql in glob.glob(pattern): + if os.path.exists(test_db): + os.unlink(test_db) + + print(f"Testing {sql}") + + # Create database from the SQL file + conn = sqlite3.connect(test_db) + with open(sql) as f: + sql_script = f.read() + conn.executescript(sql_script) + conn.close() + + # Upgrade the database + db = Database(LogTool(config), main_service=True) + db.engine.dispose() + + # Compare + compare_with_latest_sql(tmpdir) From 1de7c362e5501707dbe9f39403fafaad2a63e856 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 12 Dec 2025 11:58:06 +0100 Subject: [PATCH 05/16] CHANGELOG: add DB changes from 1.0.0 to 1.0.1 Adding automatic database migrations from 1.0.0 to 1.0.1 is not feasible because the type of ims_subscriber.ifc_path was changed from VARCHAR(18) to VARCHAR(512), and this is not trivial to do with database type sqlite (there is no ALTER TABLE MODIFY). I don't think many people are running 1.0.0 from 2023, if any at all, so just document the changes that were made. --- CHANGELOG.md | 85 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 693ee839..eb9fe7c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -63,6 +63,91 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - SQN Resync now propogates via Geored when enabled - Renamed sh_profile to xcap_profile in ims_subscriber - Rebuilt keys using unique namespace for redis-sentinel / stateless compatibility. 
+- The database schema was changed as follows. If you have a PyHSS database + created with version 1.0.0 that you would like to use with 1.0.1 or newer, + apply these changes manually. Newer versions of PyHSS have automatic database + migrations. +
+ +```diff +--- a/release_1.0.0.sql ++++ b/release_1.0.1.sql +@@ -13,6 +13,12 @@ CREATE TABLE apn ( + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + charging_rule_list VARCHAR(18), ++ nbiot BOOLEAN, ++ nidd_scef_id VARCHAR(512), ++ nidd_scef_realm VARCHAR(512), ++ nidd_mechanism INTEGER, ++ nidd_rds INTEGER, ++ nidd_preferred_data_mode INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (apn_id) + ); +@@ -80,22 +86,40 @@ CREATE TABLE eir_history ( + PRIMARY KEY (imsi_imei_history_id), + UNIQUE (imsi_imei) + ); + CREATE TABLE ims_subscriber ( + ims_subscriber_id INTEGER NOT NULL, + msisdn VARCHAR(18), + msisdn_list VARCHAR(1200), + imsi VARCHAR(18), +- ifc_path VARCHAR(18), ++ ifc_path VARCHAR(512), + pcscf VARCHAR(512), + pcscf_realm VARCHAR(512), + pcscf_active_session VARCHAR(512), + pcscf_timestamp DATETIME, + pcscf_peer VARCHAR(512), ++ xcap_profile TEXT(12000), + sh_profile TEXT(12000), + scscf VARCHAR(512), + scscf_timestamp DATETIME, + scscf_realm VARCHAR(512), + scscf_peer VARCHAR(512), ++ sh_template_path VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (ims_subscriber_id), + UNIQUE (msisdn) +@@ -115,6 +139,9 @@ CREATE TABLE operation_log ( + auc_id INTEGER, + subscriber_id INTEGER, + ims_subscriber_id INTEGER, ++ roaming_rule_id INTEGER, ++ roaming_network_id INTEGER, ++ emergency_subscriber_id INTEGER, + charging_rule_id INTEGER, + tft_id INTEGER, + eir_id INTEGER, +@@ -127,12 +154,33 @@ CREATE TABLE operation_log ( + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id), + FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id), ++ FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id), ++ FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id), ++ FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id), + FOREIGN KEY(charging_rule_id) REFERENCES 
charging_rule (charging_rule_id), + FOREIGN KEY(tft_id) REFERENCES tft (tft_id), + FOREIGN KEY(eir_id) REFERENCES eir (eir_id), + FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id), + FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id) + ); + CREATE TABLE serving_apn ( + serving_apn_id INTEGER NOT NULL, + subscriber_id INTEGER, +@@ -160,6 +208,8 @@ CREATE TABLE subscriber ( + ue_ambr_dl INTEGER, + ue_ambr_ul INTEGER, + nam INTEGER, ++ roaming_enabled BOOLEAN, ++ roaming_rule_list VARCHAR(512), + subscribed_rau_tau_timer INTEGER, + serving_mme VARCHAR(512), + serving_mme_timestamp DATETIME, +``` +
### Fixed From c522eaf037b7440fb62203ef6e336ee7e1985824 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 11 Dec 2025 09:25:32 +0100 Subject: [PATCH 06/16] DB: add database_schema_version table --- lib/database.py | 8 ++++++++ lib/databaseSchema.py | 17 +++++++++++++++-- tests/db_schema/latest.sql | 6 ++++++ 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/lib/database.py b/lib/database.py index 599d501c..4bcd7a5f 100755 --- a/lib/database.py +++ b/lib/database.py @@ -5,6 +5,7 @@ # SPDX-License-Identifier: AGPL-3.0-or-later from typing import Optional +import sqlalchemy from sqlalchemy import Column, Integer, String, MetaData, Table, Boolean, ForeignKey, select, UniqueConstraint, DateTime, BigInteger, Text, DateTime, Float from sqlalchemy import create_engine from sqlalchemy.sql import desc, func @@ -31,6 +32,13 @@ Base = declarative_base() + +class DATABASE_SCHEMA_VERSION(Base): + __tablename__ = "database_schema_version" + upgrade_id = Column(Integer, primary_key=True, doc="Schema version") + comment = Column(String(512), doc="Notes about this version upgrade") + date = Column(DateTime(timezone=True), server_default=sqlalchemy.sql.func.now(), doc="When the upgrade was done") + class APN(Base): __tablename__ = 'apn' apn_id = Column(Integer, primary_key=True, doc='Unique ID of APN') diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py index 7a8a0472..977e15e7 100644 --- a/lib/databaseSchema.py +++ b/lib/databaseSchema.py @@ -23,8 +23,21 @@ def __init__(self, logTool, base, engine: Engine, main_service: bool): self.wait_until_ready() def get_version(self): - # Future patches will store the current schema version inside the db - return 0 + ret = 0 + try: + sql = """ + SELECT version + FROM database_schema_version + ORDER BY upgrade_id DESC + LIMIT 1 + """ + with self.engine.connect() as conn: + result = conn.execute(sqlalchemy.text(sql)).fetchone() + if result: + ret = result[0] + except Exception: + pass + return ret def is_ready(self): if 
not database_exists(self.engine.url): diff --git a/tests/db_schema/latest.sql b/tests/db_schema/latest.sql index 5d871269..4c10b774 100644 --- a/tests/db_schema/latest.sql +++ b/tests/db_schema/latest.sql @@ -69,6 +69,12 @@ CREATE TABLE charging_rule ( tft_group_id INTEGER, PRIMARY KEY (charging_rule_id) ); +CREATE TABLE database_schema_version ( + comment VARCHAR(512), + date DATETIME DEFAULT CURRENT_TIMESTAMP, + upgrade_id INTEGER NOT NULL, + PRIMARY KEY (upgrade_id) +); CREATE TABLE eir ( eir_id INTEGER NOT NULL, imei VARCHAR(60), From 09e96d4b281c72ac43649690125f89b29a956cef Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 11 Dec 2025 10:06:09 +0100 Subject: [PATCH 07/16] DB: upgrade from 1.0.1 Version 1.0.2 has the same database scheme as 1.0.1. --- lib/databaseSchema.py | 61 ++++- tests/db_schema/20240125_release_1.0.1.sql | 257 +++++++++++++++++++++ 2 files changed, 316 insertions(+), 2 deletions(-) create mode 100644 tests/db_schema/20240125_release_1.0.1.sql diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py index 977e15e7..c8bb07c4 100644 --- a/lib/databaseSchema.py +++ b/lib/databaseSchema.py @@ -8,7 +8,7 @@ class DatabaseSchema: - latest = 0 + latest = 1 def __init__(self, logTool, base, engine: Engine, main_service: bool): self.logTool = logTool @@ -19,6 +19,7 @@ def __init__(self, logTool, base, engine: Engine, main_service: bool): if main_service: self.init_db() self.init_tables() + self.upgrade_all() else: self.wait_until_ready() @@ -26,7 +27,7 @@ def get_version(self): ret = 0 try: sql = """ - SELECT version + SELECT upgrade_id FROM database_schema_version ORDER BY upgrade_id DESC LIMIT 1 @@ -97,3 +98,59 @@ def init_tables(self): level="debug", message=f"Table {table_name} already exists", ) + + def execute(self, sql): + with self.engine.connect() as conn: + conn.execute(sqlalchemy.text(sql)) + conn.commit() + + def upgrade_msg(self, new_version): + self.logTool.log( + service="Database", + level="info", + message=f"Upgrading database 
schema to version {new_version}", + ) + + def set_version(self, new_version): + self.execute(f""" + INSERT INTO database_schema_version (upgrade_id, comment) + VALUES ({int(new_version)}, "automatic upgrade from PyHSS") + """) + + def column_exists(self, table, column): + inspector = sqlalchemy.inspect(self.engine) + columns = inspector.get_columns(table) + + for col in columns: + if col["name"] == column: + return True + + return False + + def add_column(self, table, column, type): + if self.column_exists(table, column): + return + self.execute(f"ALTER TABLE {table} ADD {column} {type}") + + def upgrade_from_20240603_release_1_0_1(self): + if self.get_version() >= 1: + return + self.upgrade_msg(1) + self.add_column("auc", "algo", "VARCHAR(20)") + self.add_column("subscriber", "last_location_update_timestamp", "DATETIME") + self.add_column("subscriber", "last_seen_cell_id", "VARCHAR(64)") + self.add_column("subscriber", "last_seen_eci", "VARCHAR(64)") + self.add_column("subscriber", "last_seen_enodeb_id", "VARCHAR(64)") + self.add_column("subscriber", "last_seen_mcc", "VARCHAR(3)") + self.add_column("subscriber", "last_seen_mnc", "VARCHAR(3)") + self.add_column("subscriber", "last_seen_tac", "VARCHAR(64)") + self.add_column("subscriber", "serving_msc", "VARCHAR(512)") + self.add_column("subscriber", "serving_msc_timestamp", "DATETIME") + self.add_column("subscriber", "serving_sgsn", "VARCHAR(512)") + self.add_column("subscriber", "serving_sgsn_timestamp", "DATETIME") + self.add_column("subscriber", "serving_vlr", "VARCHAR(512)") + self.add_column("subscriber", "serving_vlr_timestamp", "DATETIME") + self.set_version(1) + + def upgrade_all(self): + self.upgrade_from_20240603_release_1_0_1() diff --git a/tests/db_schema/20240125_release_1.0.1.sql b/tests/db_schema/20240125_release_1.0.1.sql new file mode 100644 index 00000000..c875bd95 --- /dev/null +++ b/tests/db_schema/20240125_release_1.0.1.sql @@ -0,0 +1,257 @@ +BEGIN TRANSACTION; +CREATE TABLE apn ( + apn_id 
INTEGER NOT NULL, + apn VARCHAR(50) NOT NULL, + ip_version INTEGER, + pgw_address VARCHAR(50), + sgw_address VARCHAR(50), + charging_characteristics VARCHAR(4), + apn_ambr_dl INTEGER NOT NULL, + apn_ambr_ul INTEGER NOT NULL, + qci INTEGER, + arp_priority INTEGER, + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + charging_rule_list VARCHAR(18), + nbiot BOOLEAN, + nidd_scef_id VARCHAR(512), + nidd_scef_realm VARCHAR(512), + nidd_mechanism INTEGER, + nidd_rds INTEGER, + nidd_preferred_data_mode INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (apn_id) +); +CREATE TABLE auc ( + auc_id INTEGER NOT NULL, + ki VARCHAR(32) NOT NULL, + opc VARCHAR(32) NOT NULL, + amf VARCHAR(4) NOT NULL, + sqn BIGINT, + iccid VARCHAR(20), + imsi VARCHAR(18), + batch_name VARCHAR(20), + sim_vendor VARCHAR(20), + esim BOOLEAN, + lpa VARCHAR(128), + pin1 VARCHAR(20), + pin2 VARCHAR(20), + puk1 VARCHAR(20), + puk2 VARCHAR(20), + kid VARCHAR(20), + psk VARCHAR(128), + des VARCHAR(128), + adm1 VARCHAR(20), + misc1 VARCHAR(128), + misc2 VARCHAR(128), + misc3 VARCHAR(128), + misc4 VARCHAR(128), + last_modified VARCHAR(100), + PRIMARY KEY (auc_id), + UNIQUE (iccid), + UNIQUE (imsi) +); +CREATE TABLE charging_rule ( + charging_rule_id INTEGER NOT NULL, + rule_name VARCHAR(20), + qci INTEGER, + arp_priority INTEGER, + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + mbr_dl INTEGER NOT NULL, + mbr_ul INTEGER NOT NULL, + gbr_dl INTEGER NOT NULL, + gbr_ul INTEGER NOT NULL, + tft_group_id INTEGER, + precedence INTEGER, + rating_group INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (charging_rule_id) +); +CREATE TABLE eir ( + eir_id INTEGER NOT NULL, + imei VARCHAR(60), + imsi VARCHAR(60), + regex_mode INTEGER, + match_response_code INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (eir_id) +); +CREATE TABLE eir_history ( + imsi_imei_history_id INTEGER NOT NULL, + imsi_imei VARCHAR(60), + match_response_code INTEGER, + imsi_imei_timestamp 
DATETIME, + last_modified VARCHAR(100), + PRIMARY KEY (imsi_imei_history_id), + UNIQUE (imsi_imei) +); +CREATE TABLE emergency_subscriber ( + emergency_subscriber_id INTEGER NOT NULL, + imsi VARCHAR(18), + serving_pgw VARCHAR(512), + serving_pgw_timestamp VARCHAR(512), + serving_pcscf VARCHAR(512), + serving_pcscf_timestamp VARCHAR(512), + gx_origin_realm VARCHAR(512), + gx_origin_host VARCHAR(512), + rat_type VARCHAR(512), + ip VARCHAR(512), + access_network_gateway_address VARCHAR(512), + access_network_charging_address VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (emergency_subscriber_id) +); +-- NOTE: xcap_profile and sh_profile might also be TEXT(12000): +-- https://github.com/nickvsnetworking/pyhss/commit/7d66298698d92176be1fef212de409a3ecfcdaf6 +-- But migrating this with sqlite is not trivial (no ALTER TABLE MODIFY). In +-- practice it won't matter so let's just assume TEXT here. +CREATE TABLE ims_subscriber ( + ims_subscriber_id INTEGER NOT NULL, + msisdn VARCHAR(18), + msisdn_list VARCHAR(1200), + imsi VARCHAR(18), + ifc_path VARCHAR(512), + pcscf VARCHAR(512), + pcscf_realm VARCHAR(512), + pcscf_active_session VARCHAR(512), + pcscf_timestamp DATETIME, + pcscf_peer VARCHAR(512), + xcap_profile TEXT, + sh_profile TEXT, + scscf VARCHAR(512), + scscf_timestamp DATETIME, + scscf_realm VARCHAR(512), + scscf_peer VARCHAR(512), + sh_template_path VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (ims_subscriber_id), + UNIQUE (msisdn) +); +CREATE TABLE operation_log ( + id INTEGER NOT NULL, + item_id INTEGER NOT NULL, + operation_id VARCHAR(36) NOT NULL, + operation VARCHAR(10), + changes TEXT, + last_modified VARCHAR(100), + timestamp DATETIME, + table_name VARCHAR(255), + apn_id INTEGER, + subscriber_routing_id INTEGER, + serving_apn_id INTEGER, + auc_id INTEGER, + subscriber_id INTEGER, + ims_subscriber_id INTEGER, + roaming_rule_id INTEGER, + roaming_network_id INTEGER, + emergency_subscriber_id INTEGER, + charging_rule_id INTEGER, + 
tft_id INTEGER, + eir_id INTEGER, + imsi_imei_history_id INTEGER, + subscriber_attributes_id INTEGER, + PRIMARY KEY (id), + FOREIGN KEY(apn_id) REFERENCES apn (apn_id), + FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id), + FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id), + FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id), + FOREIGN KEY(roaming_rule_id) REFERENCES roaming_rule (roaming_rule_id), + FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id), + FOREIGN KEY(emergency_subscriber_id) REFERENCES emergency_subscriber (emergency_subscriber_id), + FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id), + FOREIGN KEY(tft_id) REFERENCES tft (tft_id), + FOREIGN KEY(eir_id) REFERENCES eir (eir_id), + FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id), + FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id) +); +CREATE TABLE roaming_network ( + roaming_network_id INTEGER NOT NULL, + name VARCHAR(512), + preference INTEGER, + mcc VARCHAR(100), + mnc VARCHAR(100), + last_modified VARCHAR(100), + PRIMARY KEY (roaming_network_id) +); +CREATE TABLE roaming_rule ( + roaming_rule_id INTEGER NOT NULL, + roaming_network_id INTEGER, + allow BOOLEAN, + enabled BOOLEAN, + last_modified VARCHAR(100), + PRIMARY KEY (roaming_rule_id), + FOREIGN KEY(roaming_network_id) REFERENCES roaming_network (roaming_network_id) ON DELETE CASCADE +); +CREATE TABLE serving_apn ( + serving_apn_id INTEGER NOT NULL, + subscriber_id INTEGER, + apn INTEGER, + pcrf_session_id VARCHAR(100), + subscriber_routing VARCHAR(100), + ip_version INTEGER, + serving_pgw VARCHAR(512), + serving_pgw_timestamp DATETIME, + serving_pgw_realm VARCHAR(512), + serving_pgw_peer VARCHAR(512), + last_modified 
VARCHAR(100), + PRIMARY KEY (serving_apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE subscriber ( + subscriber_id INTEGER NOT NULL, + imsi VARCHAR(18), + enabled BOOLEAN, + auc_id INTEGER NOT NULL, + default_apn INTEGER NOT NULL, + apn_list VARCHAR(64) NOT NULL, + msisdn VARCHAR(18), + ue_ambr_dl INTEGER, + ue_ambr_ul INTEGER, + nam INTEGER, + roaming_enabled BOOLEAN, + roaming_rule_list VARCHAR(512), + subscribed_rau_tau_timer INTEGER, + serving_mme VARCHAR(512), + serving_mme_timestamp DATETIME, + serving_mme_realm VARCHAR(512), + serving_mme_peer VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (subscriber_id), + UNIQUE (imsi), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(default_apn) REFERENCES apn (apn_id) +); +CREATE TABLE subscriber_attributes ( + subscriber_attributes_id INTEGER NOT NULL, + subscriber_id INTEGER NOT NULL, + "key" VARCHAR(60), + last_modified VARCHAR(100), + value VARCHAR(12000), + PRIMARY KEY (subscriber_attributes_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE +); +CREATE TABLE subscriber_routing ( + subscriber_routing_id INTEGER NOT NULL, + subscriber_id INTEGER, + apn_id INTEGER, + ip_version INTEGER, + ip_address VARCHAR(254), + last_modified VARCHAR(100), + PRIMARY KEY (subscriber_routing_id), + UNIQUE (subscriber_id, apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE tft ( + tft_id INTEGER NOT NULL, + tft_group_id INTEGER NOT NULL, + tft_string VARCHAR(100) NOT NULL, + direction INTEGER NOT NULL, + last_modified VARCHAR(100), + PRIMARY KEY (tft_id) +); +COMMIT; From 6257ed0d88c69241e06dea14c004925a2ae7a8c0 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 12 Dec 2025 13:58:58 +0100 Subject: [PATCH 08/16] DB: refuse 
start with schema below release 1.0.1 --- lib/databaseSchema.py | 43 +++++ tests/db_schema/20231009_release_1.0.0.sql | 203 +++++++++++++++++++++ tests/test_database_upgrade.py | 22 +++ 3 files changed, 268 insertions(+) create mode 100644 tests/db_schema/20231009_release_1.0.0.sql diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py index c8bb07c4..c7d4888e 100644 --- a/lib/databaseSchema.py +++ b/lib/databaseSchema.py @@ -64,6 +64,48 @@ def wait_until_ready(self): ) sys.exit(10) + def ensure_release_1_0_1_or_newer(self): + expected = { + "apn": [ + "nbiot", + "nidd_scef_id", + "nidd_scef_realm", + "nidd_mechanism", + "nidd_rds", + "nidd_preferred_data_mode", + ], + "ims_subscriber": [ + "xcap_profile", + "sh_template_path", + ], + "operation_log": [ + "roaming_rule_id", + "roaming_network_id", + "emergency_subscriber_id", + ], + "subscriber": [ + "roaming_enabled", + "roaming_rule_list", + ], + } + + for table, columns in expected.items(): + for column in columns: + if not self.column_exists(table, column): + self.logTool.log( + service="Database", + level="warning", + message=f"Database column missing: {table}.{column}", + ) + self.logTool.log( + service="Database", + level="error", + message="Database schemas from before PyHSS 1.0.1 are not supported." 
+ " Start with a new database or migrate manually:" + " https://github.com/nickvsnetworking/pyhss/blob/master/CHANGELOG.md#101---2024-01-23", + ) + sys.exit(20) + def init_db(self): # Create database if it does not exist if not database_exists(self.engine.url): @@ -80,6 +122,7 @@ def init_db(self): level="debug", message="Database already created", ) + self.ensure_release_1_0_1_or_newer() def init_tables(self): # Create individual tables if they do not exist diff --git a/tests/db_schema/20231009_release_1.0.0.sql b/tests/db_schema/20231009_release_1.0.0.sql new file mode 100644 index 00000000..0c9acc9e --- /dev/null +++ b/tests/db_schema/20231009_release_1.0.0.sql @@ -0,0 +1,203 @@ +BEGIN TRANSACTION; +CREATE TABLE apn ( + apn_id INTEGER NOT NULL, + apn VARCHAR(50) NOT NULL, + ip_version INTEGER, + pgw_address VARCHAR(50), + sgw_address VARCHAR(50), + charging_characteristics VARCHAR(4), + apn_ambr_dl INTEGER NOT NULL, + apn_ambr_ul INTEGER NOT NULL, + qci INTEGER, + arp_priority INTEGER, + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + charging_rule_list VARCHAR(18), + last_modified VARCHAR(100), + PRIMARY KEY (apn_id) +); +CREATE TABLE auc ( + auc_id INTEGER NOT NULL, + ki VARCHAR(32) NOT NULL, + opc VARCHAR(32) NOT NULL, + amf VARCHAR(4) NOT NULL, + sqn BIGINT, + iccid VARCHAR(20), + imsi VARCHAR(18), + batch_name VARCHAR(20), + sim_vendor VARCHAR(20), + esim BOOLEAN, + lpa VARCHAR(128), + pin1 VARCHAR(20), + pin2 VARCHAR(20), + puk1 VARCHAR(20), + puk2 VARCHAR(20), + kid VARCHAR(20), + psk VARCHAR(128), + des VARCHAR(128), + adm1 VARCHAR(20), + misc1 VARCHAR(128), + misc2 VARCHAR(128), + misc3 VARCHAR(128), + misc4 VARCHAR(128), + last_modified VARCHAR(100), + PRIMARY KEY (auc_id), + UNIQUE (iccid), + UNIQUE (imsi) +); +CREATE TABLE charging_rule ( + charging_rule_id INTEGER NOT NULL, + rule_name VARCHAR(20), + qci INTEGER, + arp_priority INTEGER, + arp_preemption_capability BOOLEAN, + arp_preemption_vulnerability BOOLEAN, + mbr_dl 
INTEGER NOT NULL, + mbr_ul INTEGER NOT NULL, + gbr_dl INTEGER NOT NULL, + gbr_ul INTEGER NOT NULL, + tft_group_id INTEGER, + precedence INTEGER, + rating_group INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (charging_rule_id) +); +CREATE TABLE eir ( + eir_id INTEGER NOT NULL, + imei VARCHAR(60), + imsi VARCHAR(60), + regex_mode INTEGER, + match_response_code INTEGER, + last_modified VARCHAR(100), + PRIMARY KEY (eir_id) +); +CREATE TABLE eir_history ( + imsi_imei_history_id INTEGER NOT NULL, + imsi_imei VARCHAR(60), + match_response_code INTEGER, + imsi_imei_timestamp DATETIME, + last_modified VARCHAR(100), + PRIMARY KEY (imsi_imei_history_id), + UNIQUE (imsi_imei) +); +CREATE TABLE ims_subscriber ( + ims_subscriber_id INTEGER NOT NULL, + msisdn VARCHAR(18), + msisdn_list VARCHAR(1200), + imsi VARCHAR(18), + ifc_path VARCHAR(18), + pcscf VARCHAR(512), + pcscf_realm VARCHAR(512), + pcscf_active_session VARCHAR(512), + pcscf_timestamp DATETIME, + pcscf_peer VARCHAR(512), + sh_profile TEXT(12000), + scscf VARCHAR(512), + scscf_timestamp DATETIME, + scscf_realm VARCHAR(512), + scscf_peer VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (ims_subscriber_id), + UNIQUE (msisdn) +); +CREATE TABLE operation_log ( + id INTEGER NOT NULL, + item_id INTEGER NOT NULL, + operation_id VARCHAR(36) NOT NULL, + operation VARCHAR(10), + changes TEXT, + last_modified VARCHAR(100), + timestamp DATETIME, + table_name VARCHAR(255), + apn_id INTEGER, + subscriber_routing_id INTEGER, + serving_apn_id INTEGER, + auc_id INTEGER, + subscriber_id INTEGER, + ims_subscriber_id INTEGER, + charging_rule_id INTEGER, + tft_id INTEGER, + eir_id INTEGER, + imsi_imei_history_id INTEGER, + subscriber_attributes_id INTEGER, + PRIMARY KEY (id), + FOREIGN KEY(apn_id) REFERENCES apn (apn_id), + FOREIGN KEY(subscriber_routing_id) REFERENCES subscriber_routing (subscriber_routing_id), + FOREIGN KEY(serving_apn_id) REFERENCES serving_apn (serving_apn_id), + FOREIGN KEY(auc_id) REFERENCES auc 
(auc_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id), + FOREIGN KEY(ims_subscriber_id) REFERENCES ims_subscriber (ims_subscriber_id), + FOREIGN KEY(charging_rule_id) REFERENCES charging_rule (charging_rule_id), + FOREIGN KEY(tft_id) REFERENCES tft (tft_id), + FOREIGN KEY(eir_id) REFERENCES eir (eir_id), + FOREIGN KEY(imsi_imei_history_id) REFERENCES eir_history (imsi_imei_history_id), + FOREIGN KEY(subscriber_attributes_id) REFERENCES subscriber_attributes (subscriber_attributes_id) +); +CREATE TABLE serving_apn ( + serving_apn_id INTEGER NOT NULL, + subscriber_id INTEGER, + apn INTEGER, + pcrf_session_id VARCHAR(100), + subscriber_routing VARCHAR(100), + ip_version INTEGER, + serving_pgw VARCHAR(512), + serving_pgw_timestamp DATETIME, + serving_pgw_realm VARCHAR(512), + serving_pgw_peer VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (serving_apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE subscriber ( + subscriber_id INTEGER NOT NULL, + imsi VARCHAR(18), + enabled BOOLEAN, + auc_id INTEGER NOT NULL, + default_apn INTEGER NOT NULL, + apn_list VARCHAR(64) NOT NULL, + msisdn VARCHAR(18), + ue_ambr_dl INTEGER, + ue_ambr_ul INTEGER, + nam INTEGER, + subscribed_rau_tau_timer INTEGER, + serving_mme VARCHAR(512), + serving_mme_timestamp DATETIME, + serving_mme_realm VARCHAR(512), + serving_mme_peer VARCHAR(512), + last_modified VARCHAR(100), + PRIMARY KEY (subscriber_id), + UNIQUE (imsi), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(default_apn) REFERENCES apn (apn_id) +); +CREATE TABLE subscriber_attributes ( + subscriber_attributes_id INTEGER NOT NULL, + subscriber_id INTEGER NOT NULL, + "key" VARCHAR(60), + last_modified VARCHAR(100), + value VARCHAR(12000), + PRIMARY KEY (subscriber_attributes_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE +); +CREATE 
TABLE subscriber_routing ( + subscriber_routing_id INTEGER NOT NULL, + subscriber_id INTEGER, + apn_id INTEGER, + ip_version INTEGER, + ip_address VARCHAR(254), + last_modified VARCHAR(100), + PRIMARY KEY (subscriber_routing_id), + UNIQUE (subscriber_id, apn_id), + FOREIGN KEY(subscriber_id) REFERENCES subscriber (subscriber_id) ON DELETE CASCADE, + FOREIGN KEY(apn_id) REFERENCES apn (apn_id) ON DELETE CASCADE +); +CREATE TABLE tft ( + tft_id INTEGER NOT NULL, + tft_group_id INTEGER NOT NULL, + tft_string VARCHAR(100) NOT NULL, + direction INTEGER NOT NULL, + last_modified VARCHAR(100), + PRIMARY KEY (tft_id) +); +COMMIT; diff --git a/tests/test_database_upgrade.py b/tests/test_database_upgrade.py index 6c235d4b..14ac3c23 100644 --- a/tests/test_database_upgrade.py +++ b/tests/test_database_upgrade.py @@ -4,6 +4,7 @@ import os import glob import re +import pytest from pathlib import Path from database import Database from logtool import LogTool @@ -99,6 +100,9 @@ def test_old_versions(tmpdir, monkeypatch): pattern = os.path.join(top_dir, "tests/db_schema/*.sql") for sql in glob.glob(pattern): + if sql.endswith("/20231009_release_1.0.0.sql"): + # See test_unsupported_1_0_0() below + continue if os.path.exists(test_db): os.unlink(test_db) @@ -117,3 +121,21 @@ def test_old_versions(tmpdir, monkeypatch): # Compare compare_with_latest_sql(tmpdir) + + +def test_unsupported_1_0_0(tmpdir, monkeypatch): + monkeypatch.setitem(config["database"], "database", test_db) + + if os.path.exists(test_db): + os.unlink(test_db) + + conn = sqlite3.connect(test_db) + sql = os.path.join(top_dir, "tests/db_schema/20231009_release_1.0.0.sql") + with open(sql) as f: + sql_script = f.read() + conn.executescript(sql_script) + conn.close() + + with pytest.raises(SystemExit) as e: + Database(LogTool(config), main_service=True) + assert e.value.code == 20 From 9ff22a2702ee8f1966f1516fa4c58e915abd9511 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 12 Dec 2025 14:19:09 +0100 Subject: 
[PATCH 09/16] DB: log schema version on start --- lib/databaseSchema.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py index c7d4888e..62c43903 100644 --- a/lib/databaseSchema.py +++ b/lib/databaseSchema.py @@ -117,12 +117,20 @@ def init_db(self): create_database(self.engine.url) self.base.metadata.create_all(self.engine) else: + version = self.get_version() self.logTool.log( service="Database", level="debug", - message="Database already created", + message=f"Database already created (schema version: {version})", ) - self.ensure_release_1_0_1_or_newer() + if version > self.latest: + self.logTool.log( + service="Database", + level="warning", + message=f"Database schema version {version} is higher than latest known version {self.latest}", + ) + else: + self.ensure_release_1_0_1_or_newer() def init_tables(self): # Create individual tables if they do not exist From 3b62df3575da4819e2040bc84572ff8c39a0814c Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Tue, 16 Dec 2025 16:04:28 +0100 Subject: [PATCH 10/16] DB: handle NULL in auc.algo The algo column is defined as: algo = Column( String(20), default='3', doc='2G Authentication Algorithm (1 = Comp128v1, 2 = Comp128v2, 3 = Comp128v3, All Other= 3G auth with 2g keys from 3G Milenage)', ) Notably: * It does not have nullable=False, meaning that NULL is allowed. * The default of 3 is not set at CREATE TABLE time, for that server_default would need to be used [1]. Handle the case that users may fill the database with a script that leaves the algo field empty and therefore defaults to NULL. 
Fix for: TypeError: int() argument must be a string, a bytes-like object or a real number, not 'NoneType' [1]: https://docs.sqlalchemy.org/en/20/core/defaults.html#sqlalchemy.schema.ColumnDefault --- lib/database.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/database.py b/lib/database.py index 4bcd7a5f..f00bf393 100755 --- a/lib/database.py +++ b/lib/database.py @@ -1638,7 +1638,8 @@ def Get_Vectors_AuC(self, auc_id, action, **kwargs): elif action == "2g3g": # Mask first bit of AMF key_data['amf'] = '0' + key_data['amf'][1:] - vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), int(key_data['algo'])) + algo = int(key_data["algo"]) if key_data["algo"] is not None else 3 + vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), algo) vector_list = [] self.logTool.log(service='Database', level='debug', message="Generating " + str(kwargs['requested_vectors']) + " vectors for GSM use", redisClient=self.redisMessaging) while kwargs['requested_vectors'] != 0: From dc545136e004040a9e583488547cbff302404b33 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 28 Nov 2025 09:58:20 +0100 Subject: [PATCH 11/16] lib/database: AUC: add sqn_ind_bitlen MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Prepare to implement the SQN counter as described in 3GPP TS 33.102 § C.1.1.2, instead of the currently used "sqn += 100" approach. According to the spec, SQN should look like this: SQN = SEQ | IND * SEQ is the number we increase with each new SQN. * IND is an array index. The upcoming patches will assign one IND to each client connected to PyHSS (MSC/VLR, SGSN, MME, ...). This is the same logic as in OsmoHLR. In order to do this, we need to know the subscriber-specific length of IND. § C.3.2 recommends the value 5, but it can be different. 
In the database, I use default=None instead of default=5, so we don't need to add migration logic for existing subscriber entries. Upcoming patches will check for None and use 5 in that case. --- lib/database.py | 1 + lib/databaseSchema.py | 10 +++++++++- tests/db_schema/latest.sql | 1 + tests/test_API.py | 1 + 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/lib/database.py b/lib/database.py index f00bf393..77316854 100755 --- a/lib/database.py +++ b/lib/database.py @@ -70,6 +70,7 @@ class AUC(Base): opc = Column(String(32), doc='SIM Key - Network Operators key OPc', nullable=False) amf = Column(String(4), doc='Authentication Management Field', nullable=False) sqn = Column(BigInteger, doc='Authentication sequence number') + sqn_ind_bitlen = Column(Integer, default=None, doc="Number of IND bits at lower SQN end (default is 5 if set to None)") iccid = Column(String(20), unique=True, doc='Integrated Circuit Card Identification Number') imsi = Column(String(18), unique=True, doc='International Mobile Subscriber Identity') batch_name = Column(String(20), doc='Name of SIM Batch') diff --git a/lib/databaseSchema.py b/lib/databaseSchema.py index 62c43903..c52b2584 100644 --- a/lib/databaseSchema.py +++ b/lib/databaseSchema.py @@ -8,7 +8,7 @@ class DatabaseSchema: - latest = 1 + latest = 2 def __init__(self, logTool, base, engine: Engine, main_service: bool): self.logTool = logTool @@ -203,5 +203,13 @@ def upgrade_from_20240603_release_1_0_1(self): self.add_column("subscriber", "serving_vlr_timestamp", "DATETIME") self.set_version(1) + def upgrade_to_v2(self): + if self.get_version() >= 2: + return + self.upgrade_msg(2) + self.add_column("auc", "sqn_ind_bitlen", "INTEGER") + self.set_version(2) + def upgrade_all(self): self.upgrade_from_20240603_release_1_0_1() + self.upgrade_to_v2() diff --git a/tests/db_schema/latest.sql b/tests/db_schema/latest.sql index 4c10b774..a3f6ebba 100644 --- a/tests/db_schema/latest.sql +++ b/tests/db_schema/latest.sql @@ -48,6 
+48,7 @@ CREATE TABLE auc ( puk2 VARCHAR(20), sim_vendor VARCHAR(20), sqn BIGINT, + sqn_ind_bitlen INTEGER, PRIMARY KEY (auc_id), UNIQUE (iccid), UNIQUE (imsi) diff --git a/tests/test_API.py b/tests/test_API.py index 412c0ba6..a83c377b 100644 --- a/tests/test_API.py +++ b/tests/test_API.py @@ -96,6 +96,7 @@ class AUC_Tests(unittest.TestCase): "opc": '44d51018f65affc04e6d56d699df3a76', "amf": "8000", "sqn": 99, + "sqn_ind_bitlen": None, 'batch_name': None, 'esim': False, 'iccid': None, From 6324944f698547e7bd7f222e5a918a61185c3dc2 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Wed, 26 Nov 2025 15:29:00 +0100 Subject: [PATCH 12/16] lib/database: split up Get_Vectors_AuC --- lib/database.py | 190 +++++++++++++++++++++---------------- lib/diameter.py | 10 +- lib/gsup/controller/air.py | 6 +- services/apiService.py | 4 +- tests/test_database.py | 8 +- 5 files changed, 121 insertions(+), 97 deletions(-) diff --git a/lib/database.py b/lib/database.py index 77316854..808707a9 100755 --- a/lib/database.py +++ b/lib/database.py @@ -1581,96 +1581,120 @@ def Get_Served_PCRF_Subscribers(self, get_local_users_only=False): self.safe_close(session) return Served_Subs - def Get_Vectors_AuC(self, auc_id, action, **kwargs): - self.logTool.log(service='Database', level='debug', message="Getting Vectors for auc_id " + str(auc_id) + " with action " + str(action), redisClient=self.redisMessaging) + def Get_Vectors_AuC_air(self, auc_id, plmn): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action air", redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) vector_dict = {} - - if action == "air": - rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn']) - vector_dict['rand'] = rand - vector_dict['xres'] = xres - vector_dict['autn'] = autn - vector_dict['kasme'] = kasme - - #Incriment SQN - self.Update_AuC(auc_id, 
sqn=key_data['sqn']+100) - - return vector_dict - - elif action == "sqn_resync": - self.logTool.log(service='Database', level='debug', message="Resync SQN", redisClient=self.redisMessaging) - rand = kwargs['rand'] - sqn, mac_s = S6a_crypt.generate_resync_s6a(key_data['ki'], key_data['opc'], key_data['amf'], kwargs['auts'], rand) - self.logTool.log(service='Database', level='debug', message="SQN from resync: " + str(sqn) + " SQN in DB is " + str(key_data['sqn']) + "(Difference of " + str(int(sqn) - int(key_data['sqn'])) + ")", redisClient=self.redisMessaging) - self.Update_AuC(auc_id, sqn=sqn+100) - return - - elif action == "sip_auth": - rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn']) - self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) - self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) - vector_dict['SIP_Authenticate'] = rand + autn - vector_dict['xres'] = xres - vector_dict['ck'] = ck - vector_dict['ik'] = ik - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) - return vector_dict - - elif action == "aka": - rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn']) - vector_list = [] - self.logTool.log(service='Database', level='debug', message="Generating " + str(kwargs['requested_vectors']) + " vectors for GSM use", redisClient=self.redisMessaging) - while kwargs['requested_vectors'] != 0: - self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) - self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) - - vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8") - vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8") - 
vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8") - vector_dict['ck'] = binascii.hexlify(ck).decode("utf-8") - vector_dict['ik'] = binascii.hexlify(ik).decode("utf-8") - - kwargs['requested_vectors'] = kwargs['requested_vectors'] - 1 - vector_list.append(vector_dict) - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) - return vector_list - - elif action == "2g3g": - # Mask first bit of AMF - key_data['amf'] = '0' + key_data['amf'][1:] - algo = int(key_data["algo"]) if key_data["algo"] is not None else 3 - vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), algo) - vector_list = [] - self.logTool.log(service='Database', level='debug', message="Generating " + str(kwargs['requested_vectors']) + " vectors for GSM use", redisClient=self.redisMessaging) - while kwargs['requested_vectors'] != 0: - kwargs['requested_vectors'] = kwargs['requested_vectors'] - 1 - vector_list.append(vect) - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) - return vector_list - - elif action == "eap_aka": - rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], kwargs['plmn']) + + rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + vector_dict['rand'] = rand + vector_dict['xres'] = xres + vector_dict['autn'] = autn + vector_dict['kasme'] = kasme + + #Incriment SQN + self.Update_AuC(auc_id, sqn=key_data['sqn']+100) + + return vector_dict + + def Get_Vectors_AuC_sqn_resync(self, auc_id, auts, rand): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action sqn_resync", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + + self.logTool.log(service='Database', level='debug', message="Resync SQN", redisClient=self.redisMessaging) + sqn, mac_s = S6a_crypt.generate_resync_s6a(key_data['ki'], 
key_data['opc'], key_data['amf'], auts, rand) + self.logTool.log(service='Database', level='debug', message="SQN from resync: " + str(sqn) + " SQN in DB is " + str(key_data['sqn']) + "(Difference of " + str(int(sqn) - int(key_data['sqn'])) + ")", redisClient=self.redisMessaging) + self.Update_AuC(auc_id, sqn=sqn+100) + + def Get_Vectors_AuC_sip_auth(self, auc_id, plmn): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action auc_id", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + vector_dict = {} + + rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) + self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) + vector_dict['SIP_Authenticate'] = rand + autn + vector_dict['xres'] = xres + vector_dict['ck'] = ck + vector_dict['ik'] = ik + self.Update_AuC(auc_id, sqn=key_data['sqn']+100) + + return vector_dict + + def Get_Vectors_AuC_aka(self, auc_id, plmn, requested_vectors): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action aka", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + vector_dict = {} + + rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + vector_list = [] + self.logTool.log(service='Database', level='debug', message=f"Generating {requested_vectors} vectors for GSM use", redisClient=self.redisMessaging) + while requested_vectors != 0: self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), 
redisClient=self.redisMessaging) + vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8") vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8") vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8") - vector_dict['mac'] = binascii.hexlify(mac_a).decode("utf-8") - vector_dict['ak'] = binascii.hexlify(ak).decode("utf-8") - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) - return vector_dict - - elif action == "Digest-MD5": - self.logTool.log(service='Database', level='debug', message="Generating Digest-MD5 Auth vectors", redisClient=self.redisMessaging) - self.logTool.log(service='Database', level='debug', message="key_data: " + str(key_data), redisClient=self.redisMessaging) - nonce = uuid.uuid4().hex - #nonce = "beef4d878f2642ed98afe491b943ca60" - vector_dict['nonce'] = nonce - vector_dict['SIP_Authenticate'] = key_data['ki'] - return vector_dict - else: - self.logTool.log(service='Database', level='error', message="Invalid action: " + str(action), redisClient=self.redisMessaging) + vector_dict['ck'] = binascii.hexlify(ck).decode("utf-8") + vector_dict['ik'] = binascii.hexlify(ik).decode("utf-8") + + requested_vectors -= 1 + vector_list.append(vector_dict) + self.Update_AuC(auc_id, sqn=key_data['sqn']+100) + + return vector_list + + def Get_Vectors_AuC_2g3g(self, auc_id, requested_vectors): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action 2g3g", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + vector_dict = {} + + # Mask first bit of AMF + key_data['amf'] = '0' + key_data['amf'][1:] + algo = int(key_data["algo"]) if key_data["algo"] is not None else 3 + vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), algo) + vector_list = [] + self.logTool.log(service='Database', level='debug', message=f"Generating {requested_vectors} vectors for GSM use", redisClient=self.redisMessaging) + + while requested_vectors 
!= 0: + requested_vectors -= 1 + vector_list.append(vect) + self.Update_AuC(auc_id, sqn=key_data['sqn']+100) + + return vector_list + + def Get_Vectors_AuC_eap_aka(self, auc_id, plmn): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action eap_aka", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + vector_dict = {} + + rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) + self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) + vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8") + vector_dict['autn'] = binascii.hexlify(autn).decode("utf-8") + vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8") + vector_dict['mac'] = binascii.hexlify(mac_a).decode("utf-8") + vector_dict['ak'] = binascii.hexlify(ak).decode("utf-8") + self.Update_AuC(auc_id, sqn=key_data['sqn']+100) + + return vector_dict + + def Get_Vectors_AuC_digest_md5(self, auc_id): + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action Digest-MD5", redisClient=self.redisMessaging) + key_data = self.GetObj(AUC, auc_id) + vector_dict = {} + + self.logTool.log(service='Database', level='debug', message="Generating Digest-MD5 Auth vectors", redisClient=self.redisMessaging) + self.logTool.log(service='Database', level='debug', message="key_data: " + str(key_data), redisClient=self.redisMessaging) + nonce = uuid.uuid4().hex + #nonce = "beef4d878f2642ed98afe491b943ca60" + vector_dict['nonce'] = nonce + vector_dict['SIP_Authenticate'] = key_data['ki'] + + return vector_dict def Get_Gsup_SubscriberInfo(self, imsi: str) -> SubscriberInfo: subscriber = self.Get_Subscriber(imsi=imsi) diff --git 
a/lib/diameter.py b/lib/diameter.py index e220d03d..cf05f405 100755 --- a/lib/diameter.py +++ b/lib/diameter.py @@ -2333,7 +2333,7 @@ def Answer_16777251_318(self, packet_vars, avps): rand = str(sub_avp['misc_data'])[:32] rand = binascii.unhexlify(rand) #Calculate correct SQN - self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sqn_resync", auts=auts, rand=rand) + self.database.Get_Vectors_AuC_sqn_resync(subscriber_details['auc_id'], auts, rand) #Get number of requested vectors if sub_avp['avp_code'] == 1410: @@ -2348,7 +2348,7 @@ def Answer_16777251_318(self, packet_vars, avps): while requested_vectors != 0: self.logTool.log(service='HSS', level='debug', message="Generating vector number " + str(requested_vectors), redisClient=self.redisMessaging) plmn = self.get_avp_data(avps, 1407)[0] #Get PLMN from request - vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "air", plmn=plmn) + vector_dict = self.database.Get_Vectors_AuC_air(subscriber_details['auc_id'], plmn) eutranvector = '' #This goes into the payload of AVP 10415 (Authentication info) eutranvector += self.generate_vendor_avp(1419, "c0", 10415, self.int_to_hex(requested_vectors, 4)) eutranvector += self.generate_vendor_avp(1447, "c0", 10415, vector_dict['rand']) #And is made up of other AVPs joined together with RAND @@ -3203,7 +3203,7 @@ def Answer_16777216_303(self, packet_vars, avps): auts = str(sub_avp_612['misc_data'])[32:] rand = str(sub_avp_612['misc_data'])[:32] rand = binascii.unhexlify(rand) - self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sqn_resync", auts=auts, rand=rand) + self.database.Get_Vectors_AuC_sqn_resync(subscriber_details['auc_id'], auts, rand) self.logTool.log(service='HSS', level='debug', message="Resynced SQN in DB", redisClient=self.redisMessaging) self.redisMessaging.sendMetric(serviceName='diameter', metricName='prom_diam_auth_event_count', metricType='counter', metricAction='inc', @@ -3232,7 +3232,7 @@ def Answer_16777216_303(self, 
packet_vars, avps): #Determine Vectors to Generate if auth_scheme == "Digest-MD5": self.logTool.log(service='HSS', level='debug', message="Generating MD5 Challenge", redisClient=self.redisMessaging) - vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "Digest-MD5", username=imsi, plmn=plmn) + vector_dict = self.database.Get_Vectors_AuC_digest_md5(subscriber_details['auc_id']) avp_SIP_Item_Number = self.generate_vendor_avp(613, "c0", 10415, format(int(0),"x").zfill(8)) avp_SIP_Authentication_Scheme = self.generate_vendor_avp(608, "c0", 10415, str(binascii.hexlify(b'Digest-MD5'),'ascii')) #Nonce @@ -3242,7 +3242,7 @@ def Answer_16777216_303(self, packet_vars, avps): auth_data_item = avp_SIP_Item_Number + avp_SIP_Authentication_Scheme + avp_SIP_Authenticate + avp_SIP_Authorization else: self.logTool.log(service='HSS', level='debug', message="Generating AKA-MD5 Auth Challenge", redisClient=self.redisMessaging) - vector_dict = self.database.Get_Vectors_AuC(subscriber_details['auc_id'], "sip_auth", plmn=plmn) + vector_dict = self.database.Get_Vectors_AuC_sip_auth(subscriber_details['auc_id'], plmn) #diameter.3GPP-SIP-Auth-Data-Items: diff --git a/lib/gsup/controller/air.py b/lib/gsup/controller/air.py index a2b24ea5..d25df234 100644 --- a/lib/gsup/controller/air.py +++ b/lib/gsup/controller/air.py @@ -62,14 +62,14 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage): resync_required = rand is not None and auts is not None if resync_required: - self._database.Get_Vectors_AuC(subscriber['auc_id'], 'sqn_resync', rand=rand, auts=auts.hex()) + self._database.Get_Vectors_AuC_sqn_resync(subscriber['auc_id'], auts.hex(), rand) - # Use request_vectors=1 as Get_Vectors_AuC currently doesn't + # Use request_vectors=1 as Get_Vectors_AuC_2g3g currently doesn't # increment SEQ for each requested vector: # https://github.com/nickvsnetworking/pyhss/issues/266 vectors = [] for i in range(self.get_num_vectors_req(request_dict)): - vectors += 
self._database.Get_Vectors_AuC(subscriber['auc_id'], '2g3g', requested_vectors=1) + vectors += self._database.Get_Vectors_AuC_2g3g(subscriber['auc_id'], 1) response_msg = ((GsupMessageBuilder() .with_msg_type(MsgType.SEND_AUTH_INFO_RESULT)) diff --git a/services/apiService.py b/services/apiService.py index a2ab4cb3..dad7926b 100755 --- a/services/apiService.py +++ b/services/apiService.py @@ -498,7 +498,7 @@ def get(self, imsi, plmn): #Get data from AuC auc_data = databaseClient.Get_AuC(imsi=imsi) plmn = diameterClient.EncodePLMN(mcc=plmn[0:3], mnc=plmn[3:]) - vector_dict = databaseClient.Get_Vectors_AuC(auc_data['auc_id'], action='eap_aka', plmn=plmn) + vector_dict = databaseClient.Get_Vectors_AuC_eap_aka(auc_data['auc_id'], plmn) return vector_dict, 200 except Exception as E: print(E) @@ -512,7 +512,7 @@ def get(self, imsi, vector_count): #Get data from AuC auc_data = databaseClient.Get_AuC(imsi=imsi) plmn = diameterClient.EncodePLMN(mcc=config['hss']['MCC'], mnc=config['hss']['MNC']) - vector_dict = databaseClient.Get_Vectors_AuC(auc_data['auc_id'], action='aka', plmn=plmn, requested_vectors=int(vector_count)) + vector_dict = databaseClient.Get_Vectors_AuC_aka(auc_data['auc_id'], plmn, int(vector_count)) return vector_dict, 200 except Exception as E: print(E) diff --git a/tests/test_database.py b/tests/test_database.py index 742d6c05..69b2b6ca 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -123,8 +123,8 @@ def test_database(create_test_db): # Generate Vectors print("Generating Vectors") - database.Get_Vectors_AuC(auc_id, "air", plmn="12ff") - print(database.Get_Vectors_AuC(auc_id, "sip_auth", plmn="12ff")) + print(database.Get_Vectors_AuC_air(auc_id, "12ff")) + print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff")) # Update AuC database.Update_AuC(auc_id, sqn=100) @@ -225,10 +225,10 @@ def test_database(create_test_db): # Generate Vectors for IMS Subscriber print("Generating Vectors for IMS Subscriber") - 
print(database.Get_Vectors_AuC(auc_id, "sip_auth", plmn="12ff")) + print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff")) # print("Generating Resync for IMS Subscriber") - # print(Get_Vectors_AuC(auc_id, "sqn_resync", auts='7964347dfdfe432289522183fcfb', rand='1bc9f096002d3716c65e4e1f4c1c0d17')) + # print(Get_Vectors_AuC_sqn_resync(auc_id, '7964347dfdfe432289522183fcfb', '1bc9f096002d3716c65e4e1f4c1c0d17')) # Test getting APNs GetAPN_Result = database.Get_APN(GetSubscriber_Result["default_apn"]) From 47e9bfabf0021ea5bf5956c3257b4baf9e2efbf1 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 21 Nov 2025 09:17:39 +0100 Subject: [PATCH 13/16] GSUP: Osmocom IPA: request SERNR in identity req Ask connecting clients for the SERNR, just like OsmoHLR does it. This is in preparation for generating SQN from SEQ | IND, as IND will be assigned to each SERNR for GSUP (again following what OsmoHLR is doing). --- lib/gsup/protocol/osmocom_ipa.py | 4 ++-- tests/test_gsup_air.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/gsup/protocol/osmocom_ipa.py b/lib/gsup/protocol/osmocom_ipa.py index 6fc1cd38..1695923e 100644 --- a/lib/gsup/protocol/osmocom_ipa.py +++ b/lib/gsup/protocol/osmocom_ipa.py @@ -213,7 +213,7 @@ def req_identity(self, id_tags=None): """ encoded = bytearray() if id_tags is None: - id_tags = ['UNIT', 'MACADDR', 'TYPE', 'SWVERSION', 'LOCATION', 'UNITNAME'] + id_tags = ['UNIT', 'MACADDR', 'TYPE', 'SWVERSION', 'LOCATION', 'UNITNAME', 'SERNR'] for tag in id_tags: if tag not in self._IDTAG: @@ -250,4 +250,4 @@ def id_resp(self, data): """ Make ID_RESP CCM message """ - return self.add_header(data, self.PROTO['CCM'], self.MSGT['ID_RESP']) \ No newline at end of file + return self.add_header(data, self.PROTO['CCM'], self.MSGT['ID_RESP']) diff --git a/tests/test_gsup_air.py b/tests/test_gsup_air.py index c5996500..b7dc8b44 100644 --- a/tests/test_gsup_air.py +++ b/tests/test_gsup_air.py @@ -50,6 +50,7 @@ def connect(self): # Send the 
identity response to the server data = self.ipa.tag_unit(self.identity.encode('utf-8')) + data = data + self.ipa.tag_serial("TEST-00-00-00-00-00-00".encode('utf-8')) data = self.ipa.id_resp(data) self.sock.send(data) From 5691b28803d4a5b15e5d57dc7f9773345942fd3b Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 20 Nov 2025 15:38:30 +0100 Subject: [PATCH 14/16] Proper SQN (SEQ, IND) --- lib/SQN.py | 39 +++++++++ lib/database.py | 156 +++++++++++++++++++++++++++------- lib/diameter.py | 20 ++++- lib/gsup/controller/air.py | 4 +- lib/gsup/protocol/ipa_peer.py | 6 +- lib/gsup/server.py | 2 +- pyproject.toml | 1 + tests/db_schema/latest.sql | 16 ++++ tests/test_database.py | 11 ++- 9 files changed, 212 insertions(+), 43 deletions(-) create mode 100644 lib/SQN.py diff --git a/lib/SQN.py b/lib/SQN.py new file mode 100644 index 00000000..d3323da6 --- /dev/null +++ b/lib/SQN.py @@ -0,0 +1,39 @@ +# Copyright 2025 sysmocom - s.f.m.c. GmbH +# SPDX-License-Identifier: AGPL-3.0-or-later +from typing import Optional + + +class SQN: + def __init__(self, seq: int, ind: int, ind_bitlen: Optional[int]): + # 3GPP TS 33.102 § C.3.2 suggests 5 + if not ind_bitlen: + ind_bitlen = 5 + + self.seq = seq + self.ind = ind + self.ind_bitlen = ind_bitlen + + @classmethod + def from_sqn(cls, sqn: int, ind_bitlen: Optional[int]): + # 3GPP TS 33.102 § C.3.2 suggests 5 + if not ind_bitlen: + ind_bitlen = 5 + + seq = sqn >> ind_bitlen + + bitmask = (1 << ind_bitlen) - 1 + ind = sqn & bitmask + + return SQN(seq, ind, ind_bitlen) + + def get(self): + # FIXME: modulo for seq by max seq len + + # Modulo by ind_bitlen value range + ind = self.ind + ind &= (1 << self.ind_bitlen) - 1 + + return (self.seq << self.ind_bitlen) + ind + + def __repr__(self): + return f"SQN={self.get()}(SEQ={self.seq},IND={self.ind})" diff --git a/lib/database.py b/lib/database.py index 808707a9..29793dad 100755 --- a/lib/database.py +++ b/lib/database.py @@ -29,6 +29,7 @@ import socket import traceback from pyhss_config 
import config +from SQN import SQN Base = declarative_base() @@ -69,7 +70,7 @@ class AUC(Base): ki = Column(String(32), doc='SIM Key - Authentication Key - Ki', nullable=False) opc = Column(String(32), doc='SIM Key - Network Operators key OPc', nullable=False) amf = Column(String(4), doc='Authentication Management Field', nullable=False) - sqn = Column(BigInteger, doc='Authentication sequence number') + sqn = Column(BigInteger, doc='Authentication sequence number (deprecated)') sqn_ind_bitlen = Column(Integer, default=None, doc="Number of IND bits at lower SQN end (default is 5 if set to None)") iccid = Column(String(20), unique=True, doc='Integrated Circuit Card Identification Number') imsi = Column(String(18), unique=True, doc='International Mobile Subscriber Identity') @@ -291,6 +292,25 @@ class OPERATION_LOG_BASE(Base): table_name = Column('table_name', String(255)) __mapper_args__ = {'polymorphic_on': table_name} +class AUTH_SQN_IND(Base): + __tablename__ = "auth_sqn_ind" + ind_id = Column(Integer, primary_key=True) + client_name = Column(String(255), unique=True, nullable=False) + +class AUTH_SQN_SEQ(Base): + __tablename__ = "auth_sqn_seq" + __table_args__ = ( + UniqueConstraint("auc_id", "ind_id"), + ) + seq_id = Column(Integer, primary_key=True) + auc_id = Column(Integer, ForeignKey("auc.auc_id")) + ind_id = Column(Integer, ForeignKey("auth_sqn_ind.ind_id")) + seq = Column(BigInteger) + + def __init__(self, auc_id, ind_id): + self.auc_id = auc_id + self.ind_id = ind_id + class APN_OPERATION_LOG(OPERATION_LOG_BASE): __mapper_args__ = {'polymorphic_identity': 'apn'} apn = relationship("APN", back_populates="operation_logs") @@ -1581,56 +1601,132 @@ def Get_Served_PCRF_Subscribers(self, get_local_users_only=False): self.safe_close(session) return Served_Subs - def Get_Vectors_AuC_air(self, auc_id, plmn): + def Get_AUTH_SQN_IND(self, client_name: str) -> int: + with Session(self.engine) as session: + result = 
session.query(AUTH_SQN_IND).filter(AUTH_SQN_IND.client_name == client_name).one_or_none() + if result: + ind = result.ind_id + self.logTool.log( + service="Database", + level="debug", + message=f"Getting SQN IND for {client_name}: {ind} (previously assigned)", + redisClient=self.redisMessaging, + ) + return ind + + new_row = AUTH_SQN_IND(client_name=client_name) + session.add(new_row) + session.commit() + ind = new_row.ind_id + self.logTool.log( + service="Database", + level="debug", + message=f"Getting SQN IND for {client_name}: {ind} (new)", + redisClient=self.redisMessaging, + ) + return ind + + def Get_AUTH_SQNs(self, auc_data: dict, ind: int, count: int) -> list[int]: + if count < 1: + raise RuntimeError("Get_AUTH_SQNs: count must be > 0") + + ret = [] + auc_id = auc_data["auc_id"] + ind_bitlen = auc_data["sqn_ind_bitlen"] + + with Session(self.engine) as session: + # Get SEQ from database + seq_obj = session.query(AUTH_SQN_SEQ).filter_by( + auc_id=auc_id, + ind_id=ind, + ).one_or_none() + seq = seq_obj.seq if seq_obj else 0 + + # Generate SQNs with increased SEQ + sqn = SQN(seq, ind, ind_bitlen) + for i in range(count): + sqn.seq += 1 + ret += [sqn.get()] + self.logTool.log( + service="Database", + level="debug", + message=f"Get_AUTH_SQNs for auc_id={auc_id}: {sqn}", + ) + + # Update SEQ in database + if not seq_obj: + seq_obj = AUTH_SQN_SEQ(auc_id, ind) + seq_obj.seq = sqn.seq + session.merge(seq_obj) + session.commit() + + return ret + + def Get_AUTH_SQN(self, auc_data: dict, ind: int) -> int: + return self.Get_AUTH_SQNs(auc_data, ind, 1)[0] + + def Get_Vectors_AuC_air(self, auc_id, plmn, ind): self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action air", redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) vector_dict = {} - rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + sqn = 
self.Get_AUTH_SQN(key_data, ind) + rand, xres, autn, kasme = S6a_crypt.generate_eutran_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn) vector_dict['rand'] = rand vector_dict['xres'] = xres vector_dict['autn'] = autn vector_dict['kasme'] = kasme - #Incriment SQN - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) - return vector_dict def Get_Vectors_AuC_sqn_resync(self, auc_id, auts, rand): - self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action sqn_resync", redisClient=self.redisMessaging) + self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action sqn_resync") key_data = self.GetObj(AUC, auc_id) - self.logTool.log(service='Database', level='debug', message="Resync SQN", redisClient=self.redisMessaging) + self.logTool.log(service='Database', level='debug', message="Resync SQN") sqn, mac_s = S6a_crypt.generate_resync_s6a(key_data['ki'], key_data['opc'], key_data['amf'], auts, rand) - self.logTool.log(service='Database', level='debug', message="SQN from resync: " + str(sqn) + " SQN in DB is " + str(key_data['sqn']) + "(Difference of " + str(int(sqn) - int(key_data['sqn'])) + ")", redisClient=self.redisMessaging) - self.Update_AuC(auc_id, sqn=sqn+100) + ind_bitlen = key_data["sqn_ind_bitlen"] + sqn_obj = SQN.from_sqn(sqn, ind_bitlen) + self.logTool.log(service='Database', level='debug', message=f"New: {sqn_obj}") + + with Session(self.engine) as session: + seq_obj = session.query(AUTH_SQN_SEQ).filter_by( + auc_id=auc_id, + ind_id=sqn_obj.ind, + ).one_or_none() + if seq_obj: + self.logTool.log(service='Database', level='debug', message=f"Old: {SQN(seq_obj.seq, seq_obj.ind_id, ind_bitlen)}") + else: + seq_obj = AUTH_SQN_SEQ(auc_id, sqn_obj.ind) + seq_obj.seq = sqn_obj.seq + session.merge(seq_obj) + session.commit() - def Get_Vectors_AuC_sip_auth(self, auc_id, plmn): + def Get_Vectors_AuC_sip_auth(self, auc_id, plmn, ind):
self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action auc_id", redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) vector_dict = {} - rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + sqn = self.Get_AUTH_SQN(key_data, ind) + rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn) self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) vector_dict['SIP_Authenticate'] = rand + autn vector_dict['xres'] = xres vector_dict['ck'] = ck vector_dict['ik'] = ik - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) return vector_dict - def Get_Vectors_AuC_aka(self, auc_id, plmn, requested_vectors): - self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action aka", redisClient=self.redisMessaging) + def Get_Vectors_AuC_aka(self, auc_id, plmn, ind, count): + self.logTool.log(service='Database', level='debug', message=f"Getting {count} Vectors for auc_id {auc_id} with action aka", redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) + sqns = self.Get_AUTH_SQNs(key_data, ind, count) vector_dict = {} - - rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) vector_list = [] - self.logTool.log(service='Database', level='debug', message=f"Generating {requested_vectors} vectors for GSM use", redisClient=self.redisMessaging) - while requested_vectors != 0: + + for i in range(count): + rand, autn, xres, ck, ik = S6a_crypt.generate_maa_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqns[i], plmn) self.logTool.log(service='Database', level='debug', 
message="RAND is: " + str(rand), redisClient=self.redisMessaging) self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) @@ -1640,37 +1736,34 @@ def Get_Vectors_AuC_aka(self, auc_id, plmn, requested_vectors): vector_dict['ck'] = binascii.hexlify(ck).decode("utf-8") vector_dict['ik'] = binascii.hexlify(ik).decode("utf-8") - requested_vectors -= 1 vector_list.append(vector_dict) - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) return vector_list - def Get_Vectors_AuC_2g3g(self, auc_id, requested_vectors): - self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action 2g3g", redisClient=self.redisMessaging) + def Get_Vectors_AuC_2g3g(self, auc_id, ind, count): + self.logTool.log(service='Database', level='debug', message=f"Getting {count} Vectors for auc_id {auc_id} with action 2g3g", redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) - vector_dict = {} # Mask first bit of AMF key_data['amf'] = '0' + key_data['amf'][1:] + algo = int(key_data["algo"]) if key_data["algo"] is not None else 3 - vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], int(key_data['sqn']), algo) + sqns = self.Get_AUTH_SQNs(key_data, ind, count) vector_list = [] - self.logTool.log(service='Database', level='debug', message=f"Generating {requested_vectors} vectors for GSM use", redisClient=self.redisMessaging) - while requested_vectors != 0: - requested_vectors -= 1 + for i in range(count): + vect = S6a_crypt.generate_2g3g_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqns[i], algo) vector_list.append(vect) - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) return vector_list - def Get_Vectors_AuC_eap_aka(self, auc_id, plmn): + def Get_Vectors_AuC_eap_aka(self, auc_id, plmn, ind): self.logTool.log(service='Database', level='debug', message=f"Getting Vectors for auc_id {auc_id} with action eap_aka", 
redisClient=self.redisMessaging) key_data = self.GetObj(AUC, auc_id) + sqn = self.Get_AUTH_SQN(key_data, ind) vector_dict = {} - rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], key_data['sqn'], plmn) + rand, xres, autn, mac_a, ak = S6a_crypt.generate_eap_aka_vector(key_data['ki'], key_data['opc'], key_data['amf'], sqn, plmn) self.logTool.log(service='Database', level='debug', message="RAND is: " + str(rand), redisClient=self.redisMessaging) self.logTool.log(service='Database', level='debug', message="AUTN is: " + str(autn), redisClient=self.redisMessaging) vector_dict['rand'] = binascii.hexlify(rand).decode("utf-8") @@ -1678,7 +1771,6 @@ def Get_Vectors_AuC_eap_aka(self, auc_id, plmn): vector_dict['xres'] = binascii.hexlify(xres).decode("utf-8") vector_dict['mac'] = binascii.hexlify(mac_a).decode("utf-8") vector_dict['ak'] = binascii.hexlify(ak).decode("utf-8") - self.Update_AuC(auc_id, sqn=key_data['sqn']+100) return vector_dict diff --git a/lib/diameter.py b/lib/diameter.py index cf05f405..266ded8f 100755 --- a/lib/diameter.py +++ b/lib/diameter.py @@ -720,6 +720,20 @@ def get_avp_data(self, avps, avp_code): misc_data.append(sub_avp['misc_data']) return misc_data + def get_auth_sqn_ind_from_avp_data(self, avps): + try: + OriginHost = self.get_avp_data(avps, 264)[0] + OriginRealm = self.get_avp_data(avps, 296)[0] + return self.database.Get_AUTH_SQN_IND(f"diameter:host={OriginHost},realm={OriginRealm}") + except: + self.logTool.log( + service='HSS', + level='error', + message=f"Failed to get SQN IND: {traceback.format_exc()}", + redisClient=self.redisMessaging + ) + return 0 + def decode_diameter_packet_length(self, data): packet_vars = {} data = data.hex() @@ -2308,6 +2322,7 @@ def Answer_16777251_318(self, packet_vars, avps): requested_vectors = 1 EUTRAN_Authentication_Info = self.get_avp_data(avps, 1408) self.logTool.log(service='HSS', level='debug', message=f"authInfo: 
{EUTRAN_Authentication_Info}", redisClient=self.redisMessaging) + ind = self.get_auth_sqn_ind_from_avp_data(avps) if len(EUTRAN_Authentication_Info) > 0: EUTRAN_Authentication_Info = EUTRAN_Authentication_Info[0] self.logTool.log(service='HSS', level='debug', message="AVP: Requested-EUTRAN-Authentication-Info(1408) l=44 f=VM- vnd=TGPP", redisClient=self.redisMessaging) @@ -2348,7 +2363,7 @@ def Answer_16777251_318(self, packet_vars, avps): while requested_vectors != 0: self.logTool.log(service='HSS', level='debug', message="Generating vector number " + str(requested_vectors), redisClient=self.redisMessaging) plmn = self.get_avp_data(avps, 1407)[0] #Get PLMN from request - vector_dict = self.database.Get_Vectors_AuC_air(subscriber_details['auc_id'], plmn) + vector_dict = self.database.Get_Vectors_AuC_air(subscriber_details['auc_id'], plmn, ind) eutranvector = '' #This goes into the payload of AVP 10415 (Authentication info) eutranvector += self.generate_vendor_avp(1419, "c0", 10415, self.int_to_hex(requested_vectors, 4)) eutranvector += self.generate_vendor_avp(1447, "c0", 10415, vector_dict['rand']) #And is made up of other AVPs joined together with RAND @@ -3195,6 +3210,7 @@ def Answer_16777216_303(self, packet_vars, avps): mcc, mnc = imsi[0:3], imsi[3:5] plmn = self.EncodePLMN(mcc, mnc) + ind = self.get_auth_sqn_ind_from_avp_data(avps) #Determine if SQN Resync is required & auth type to use for sub_avp_612 in self.get_avp_data(avps, 612)[0]: @@ -3242,7 +3258,7 @@ def Answer_16777216_303(self, packet_vars, avps): auth_data_item = avp_SIP_Item_Number + avp_SIP_Authentication_Scheme + avp_SIP_Authenticate + avp_SIP_Authorization else: self.logTool.log(service='HSS', level='debug', message="Generating AKA-MD5 Auth Challenge", redisClient=self.redisMessaging) - vector_dict = self.database.Get_Vectors_AuC_sip_auth(subscriber_details['auc_id'], plmn) + vector_dict = self.database.Get_Vectors_AuC_sip_auth(subscriber_details['auc_id'], plmn, ind) 
#diameter.3GPP-SIP-Auth-Data-Items: diff --git a/lib/gsup/controller/air.py b/lib/gsup/controller/air.py index d25df234..002d9b73 100644 --- a/lib/gsup/controller/air.py +++ b/lib/gsup/controller/air.py @@ -59,6 +59,8 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage): subscriber = self._database.Get_Subscriber(imsi=imsi) rand = GsupMessageUtil.get_first_ie_by_name('rand', request_dict) auts = GsupMessageUtil.get_first_ie_by_name('auts', request_dict) + client_name = f"gsup:sernr={peer.tags['SERNR']}".rstrip("\x00") + ind = self._database.Get_AUTH_SQN_IND(client_name) resync_required = rand is not None and auts is not None if resync_required: @@ -69,7 +71,7 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage): # https://github.com/nickvsnetworking/pyhss/issues/266 vectors = [] for i in range(self.get_num_vectors_req(request_dict)): - vectors += self._database.Get_Vectors_AuC_2g3g(subscriber['auc_id'], 1) + vectors += self._database.Get_Vectors_AuC_2g3g(subscriber['auc_id'], ind, 1) response_msg = ((GsupMessageBuilder() .with_msg_type(MsgType.SEND_AUTH_INFO_RESULT)) diff --git a/lib/gsup/protocol/ipa_peer.py b/lib/gsup/protocol/ipa_peer.py index 1d71ecb6..8bdaba3a 100644 --- a/lib/gsup/protocol/ipa_peer.py +++ b/lib/gsup/protocol/ipa_peer.py @@ -67,5 +67,9 @@ def __init__(self, name: str, tags: dict, reader: StreamReader, writer: StreamWr raise ValueError( "Role not found in tags. 
'sgsn' or 'msc' must appear in one of there tags: " + ', '.join( self._ROLE_PREFERENCE_TAGS)) + + if "SERNR" not in tags: + raise ValueError("Client didn't send tag SERNR") + def __str__(self): - return f"[{self.name} ({self.role.name})]" \ No newline at end of file + return f"[{self.name} ({self.role.name})]" diff --git a/lib/gsup/server.py b/lib/gsup/server.py index 0bef453b..9fce6ed8 100644 --- a/lib/gsup/server.py +++ b/lib/gsup/server.py @@ -239,4 +239,4 @@ async def _listen_for_subscriber_updates(self): except Exception as e: await self.logger.logAsync(service='GSUP', level='ERROR', - message=f"Error processing subscriber update: {traceback.format_exc()}") \ No newline at end of file + message=f"Error processing subscriber update: {traceback.format_exc()}") diff --git a/pyproject.toml b/pyproject.toml index 25490056..9f09536e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ filterwarnings = [ [tool.ruff] line-length = 120 include = [ + "lib/SQN.py", "lib/databaseSchema.py", "lib/pyhss_config.py", "tests/test_database_upgrade.py", diff --git a/tests/db_schema/latest.sql b/tests/db_schema/latest.sql index a3f6ebba..0eb19d15 100644 --- a/tests/db_schema/latest.sql +++ b/tests/db_schema/latest.sql @@ -53,6 +53,22 @@ CREATE TABLE auc ( UNIQUE (iccid), UNIQUE (imsi) ); +CREATE TABLE auth_sqn_ind ( + client_name VARCHAR(255) NOT NULL, + ind_id INTEGER NOT NULL, + PRIMARY KEY (ind_id), + UNIQUE (client_name) +); +CREATE TABLE auth_sqn_seq ( + auc_id INTEGER, + ind_id INTEGER, + seq BIGINT, + seq_id INTEGER NOT NULL, + PRIMARY KEY (seq_id), + UNIQUE (auc_id, ind_id), + FOREIGN KEY(auc_id) REFERENCES auc (auc_id), + FOREIGN KEY(ind_id) REFERENCES auth_sqn_ind (ind_id) +); CREATE TABLE charging_rule ( arp_preemption_capability BOOLEAN, arp_preemption_vulnerability BOOLEAN, diff --git a/tests/test_database.py b/tests/test_database.py index 69b2b6ca..1fd153bd 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -123,11 +123,9 @@ def 
test_database(create_test_db): # Generate Vectors print("Generating Vectors") - print(database.Get_Vectors_AuC_air(auc_id, "12ff")) - print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff")) - - # Update AuC - database.Update_AuC(auc_id, sqn=100) + ind = database.Get_AUTH_SQN_IND("test:test_database.py") + print(database.Get_Vectors_AuC_air(auc_id, "12ff", ind)) + print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff", ind)) # New Subscriber subscriber_json = { @@ -225,7 +223,8 @@ def test_database(create_test_db): # Generate Vectors for IMS Subscriber print("Generating Vectors for IMS Subscriber") - print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff")) + ind = database.Get_AUTH_SQN_IND("test:test_database.py") + print(database.Get_Vectors_AuC_sip_auth(auc_id, "12ff", ind)) # print("Generating Resync for IMS Subscriber") # print(Get_Vectors_AuC_sqn_resync(auc_id, '7964347dfdfe432289522183fcfb', '1bc9f096002d3716c65e4e1f4c1c0d17')) From 2156cd3a91efbf7dbfaa18e29a06973575b80776 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 28 Nov 2025 11:14:09 +0100 Subject: [PATCH 15/16] lib/gsup/controller/air: remove SEQ workaround Remove the workaround now that Get_Vectors_AuC_2g3g is properly increasing SEQ with each vector. 
--- lib/gsup/controller/air.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/lib/gsup/controller/air.py b/lib/gsup/controller/air.py index 002d9b73..ab8854b3 100644 --- a/lib/gsup/controller/air.py +++ b/lib/gsup/controller/air.py @@ -66,17 +66,15 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage): if resync_required: self._database.Get_Vectors_AuC_sqn_resync(subscriber['auc_id'], auts.hex(), rand) - # Use request_vectors=1 as Get_Vectors_AuC_2g3g currently doesn't - # increment SEQ for each requested vector: - # https://github.com/nickvsnetworking/pyhss/issues/266 - vectors = [] - for i in range(self.get_num_vectors_req(request_dict)): - vectors += self._database.Get_Vectors_AuC_2g3g(subscriber['auc_id'], ind, 1) - response_msg = ((GsupMessageBuilder() .with_msg_type(MsgType.SEND_AUTH_INFO_RESULT)) .with_ie('imsi', imsi)) + vectors = self._database.Get_Vectors_AuC_2g3g( + subscriber['auc_id'], + ind, + self.get_num_vectors_req(request_dict), + ) for vector in vectors: response_msg.with_ie('auth_tuple', [vector], False) From 82e765b12597d031f252921d7540076240ab0333 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 28 Nov 2025 15:22:57 +0100 Subject: [PATCH 16/16] HACK: lib/gsup/controller/air: traceback on error to fix this better, make a custom exception for subscriber not found and check against that, and only if there is a completely unexpected log the traceback. 
--- lib/gsup/controller/air.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/gsup/controller/air.py b/lib/gsup/controller/air.py index ab8854b3..3c505e53 100644 --- a/lib/gsup/controller/air.py +++ b/lib/gsup/controller/air.py @@ -91,7 +91,7 @@ async def handle_message(self, peer: IPAPeer, message: GsupMessage): .build(), ) except ValueError as e: - await self._logger.logAsync(service='GSUP', level='WARN', message=f"Subscriber not found: {imsi}") + await self._logger.logAsync(service='GSUP', level='WARN', message=f"Subscriber not found: {imsi}, {traceback.format_exc()}") await self._send_gsup_response( peer, GsupMessageBuilder().with_msg_type(MsgType.SEND_AUTH_INFO_ERROR)