#!/usr/bin/python3
# This script parses the authentication log files produced by EngineBlock and inserts the
# login records into a MySQL table, where the SURFconext stats module analyses the data further.
# The script is intended to be run from logrotate.
# It picks up all rotated files starting with eb-authentication.log- (skipping already compressed .gz files) and parses them.
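#
# Each input line is expected to be a syslog-prefixed JSON record: everything up to the
# first "]:" is discarded and the remainder is parsed as JSON. Illustrative shape only,
# derived from the fields read in parse_lines() below, not from a real log line:
#
#   <syslog prefix [pid]>: {"context": {"idp_entity_id": ..., "sp_entity_id": ...,
#       "login_stamp": ..., "user_id": ..., "key_id": ..., "proxied_sp_entity_ids": [...]},
#       "extra": {"session_id": ..., "request_id": ...}}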

import os
import json
import MySQLdb
from dateutil.parser import parse

# Connection settings and the working directory are filled in through Jinja2 templating at deploy time.
mysql_host = "{{ item.db_loglogins_host }}"
mysql_user = "{{ item.db_loglogins_user }}"
mysql_password = "{{ item.db_loglogins_password }}"
mysql_db = "{{ item.db_loglogins_name }}"
workdir = "{{ rsyslog_dir }}/log_logins/{{ item.name }}/"

db = MySQLdb.connect(mysql_host, mysql_user, mysql_password, mysql_db)
cursor = db.cursor()

def update_lastseen(user_id, date):
    query = """
    INSERT INTO last_login (userid, lastseen)
    VALUES (%s, %s)
    ON DUPLICATE KEY UPDATE
    lastseen = GREATEST(lastseen, VALUES(lastseen))
    """
    try:
        cursor.execute(query, (user_id, date))
        db.commit()
    except Exception as e:
        db.rollback()
        print(f"Error updating last_login for user {user_id}: {e}")
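# The upsert in update_lastseen() assumes last_login has a unique (or primary) key on
# userid; ON DUPLICATE KEY UPDATE with GREATEST() then keeps the most recent date seen
# for each user. (Schema assumption inferred from the query above, not from this repo.)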

def load_in_mysql(idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxy):
    sql = """insert into log_logins(idpentityid,spentityid,loginstamp,userid,keyid,sessionid,requestid,trustedproxyentityid) values(%s,%s,%s,%s,%s,%s,%s,%s)"""
    try:
        cursor.execute(sql, (idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxy))
        db.commit()
    except Exception as e:
        db.rollback()
        print(f"Error inserting into log_logins: {e}", (idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxy))
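# Both helpers above commit every row individually and roll back on failure, so a single
# bad record only loses that one row instead of aborting the whole file.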


def parse_lines(path):
    with open(path, 'r') as input_file:
        for line in input_file:
            try:
                # Everything up to the first "]:" is the syslog prefix; the rest is the JSON payload.
                jsonline = line.split(']:', 1)[1]
                data = json.loads(jsonline)
            except (IndexError, ValueError):
                # Skip lines that do not contain a parsable JSON login record.
                continue
            idp = data["context"]["idp_entity_id"]
            sp = data["context"]["sp_entity_id"]
            timestamp = data["context"]["login_stamp"]
            user_id = data["context"]["user_id"]
            key_id = data["context"]["key_id"]
            session_id = data["extra"]["session_id"]
            request_id = data["extra"]["request_id"]
            proxied_sp_entity_ids_list = data["context"]["proxied_sp_entity_ids"]
            proxied_sp_entity_ids = ''.join(proxied_sp_entity_ids_list)
            loginstamp = parse(timestamp).strftime("%Y-%m-%d %H:%M:%S")
            last_login_date = parse(timestamp).strftime("%Y-%m-%d")
            if proxied_sp_entity_ids:
                # The login was proxied: store the proxied SP(s) as the SP and record the
                # original SP as the trusted proxy.
                load_in_mysql(idp, proxied_sp_entity_ids, loginstamp, user_id, key_id, session_id, request_id, sp)
            else:
                # No trusted proxy involved: store SQL NULL (None) rather than the literal string 'NULL'.
                load_in_mysql(idp, sp, loginstamp, user_id, key_id, session_id, request_id, None)
            update_lastseen(user_id, last_login_date)

## Loop over the rotated files and parse them one by one
for filename in os.listdir(workdir):
    filetoparse = os.path.join(workdir, filename)
    if os.path.isfile(filetoparse) and filename.startswith("eb-authentication.log-") and not filename.endswith(".gz"):
        parse_lines(filetoparse)

cursor.close()
db.close()
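
# This script is meant to be invoked by logrotate after the EngineBlock authentication
# logs have been rotated, e.g. from a postrotate/endscript block that simply calls this
# file (illustrative only; the actual logrotate configuration is managed elsewhere):
#
#   postrotate
#       /path/to/this/script
#   endscript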