#!/usr/bin/env python3

3+ """
4+ Mod updater script
5+
6+ This script packs up the coop mod files, writes them to /opt/faf/data/content/legacy-featured-mod-files/.../, and updates the database.
7+
8+ Code is mostly self-explanatory - haha, fat chance! Read it from bottom to top and don't blink. Blink and you're dead, no wait where were we?
9+ To adapt this duct-tape based blob of shit for new mission voice overs, just change files array at the very bottom.
10+
11+ Environment variables required:
12+ PATCH_VERSION
13+ DATABASE_HOST
14+ DATABASE_NAME
15+ DATABASE_USERNAME
16+ DATABASE_PASSWORD
17+ """
import glob
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
import urllib.request
import urllib.error
import zipfile

import mysql.connector

FIXED_ZIP_TIMESTAMP = (1980, 1, 1, 0, 0, 0)  # year, month, day, hour, min, sec
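# A fixed timestamp keeps the zip output byte-identical across runs for unchanged inputs,
# so the MD5 comparison in create_file() only changes when file contents actually change.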


def get_db_connection():
    """Establish and return a MySQL connection using environment variables."""
    host = os.getenv("DATABASE_HOST", "localhost")
    db = os.getenv("DATABASE_NAME", "faf")
    user = os.getenv("DATABASE_USERNAME", "root")
    password = os.getenv("DATABASE_PASSWORD", "banana")

    return mysql.connector.connect(
        host=host,
        user=user,
        password=password,
        database=db,
    )


def read_db(conn, mod):
    """
    Read the latest version and MD5 of each file from the updates_{mod}_files table.
    Returns a dict {fileId: {"version": ..., "name": ..., "md5": ...}}.
    """
    query = f"""
        SELECT uf.fileId, uf.version, uf.name, uf.md5
        FROM (
            SELECT fileId, MAX(version) AS version
            FROM updates_{mod}_files
            GROUP BY fileId
        ) AS maxthings
        INNER JOIN updates_{mod}_files AS uf
            ON maxthings.fileId = uf.fileId AND maxthings.version = uf.version;
    """

    with conn.cursor() as cursor:
        cursor.execute(query)

        oldfiles = {}
        for (fileId, version, name, md5) in cursor.fetchall():
            oldfiles[int(fileId)] = {
                "version": version,
                "name": name,
                "md5": md5,
            }

    return oldfiles


def update_db(conn, mod, fileId, version, name, md5, dryrun):
    """
    Delete and reinsert a file record in updates_{mod}_files.
    """
    delete_query = f"DELETE FROM updates_{mod}_files WHERE fileId=%s AND version=%s"
    insert_query = f"""
        INSERT INTO updates_{mod}_files (fileId, version, name, md5, obselete)
        VALUES (%s, %s, %s, %s, 0)
    """

    print(f"Updating DB for {name} (fileId={fileId}, version={version})")

    if not dryrun:
        try:
            with conn.cursor() as cursor:
                cursor.execute(delete_query, (fileId, version))
                cursor.execute(insert_query, (fileId, version, name, md5))
            conn.commit()
        except mysql.connector.Error as err:
            print(f"MySQL error while updating {name}: {err}")
            conn.rollback()
            sys.exit(1)
    else:
        print(f"Dryrun: would run for {name}")


def calc_md5(fname):
    """Return the MD5 hex digest of a file, reading it in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def zipdir(path, ziph):
    """Add a file or a directory tree to an open ZipFile with deterministic metadata."""
    if not os.path.exists(path):
        print(f"Warning: {path} does not exist, skipping")
        return

    if os.path.isdir(path):
        for root, dirs, files in os.walk(path):
            files.sort()  # deterministic order
            dirs.sort()  # deterministic order
            for file in files:
                full_path = os.path.join(root, file)
                # Keep the top-level folder name (e.g. lua/, mods/, units/) in the archive
                # so the zipped directories do not get merged at the archive root
                arcname = os.path.join(os.path.basename(path), os.path.relpath(full_path, start=path))
                info = zipfile.ZipInfo(arcname, FIXED_ZIP_TIMESTAMP)
                with open(full_path, "rb") as f:
                    data = f.read()
                ziph.writestr(info, data, compress_type=zipfile.ZIP_DEFLATED)
    else:
        # single file outside a directory
        arcname = os.path.basename(path)
        info = zipfile.ZipInfo(arcname, FIXED_ZIP_TIMESTAMP)
        with open(path, "rb") as f:
            data = f.read()
        ziph.writestr(info, data, compress_type=zipfile.ZIP_DEFLATED)


def create_file(conn, mod, fileId, version, name, source, target_dir, old_md5, dryrun):
    """Pack or copy files, compare MD5, and update the DB if the file changed."""
    target_dir = os.path.join(target_dir, f"updates_{mod}_files")
    os.makedirs(target_dir, exist_ok=True)

    name = name.format(version)
    target_name = os.path.join(target_dir, name)

    print(f"Processing {name} (fileId {fileId})")

    if isinstance(source, list):
        # A list of sources gets packed into a temporary zip first
        print(f"Zipping {source} -> {target_name}")
        fd, fname = tempfile.mkstemp("_" + name, "patcher_")
        os.close(fd)
        with zipfile.ZipFile(fname, "w", zipfile.ZIP_DEFLATED) as zf:
            for sm in source:
                zipdir(sm, zf)
        rename = True
        checksum = calc_md5(fname)
    else:
        rename = False
        fname = source
        if source is None:
            checksum = calc_md5(target_name) if os.path.exists(target_name) else None
        else:
            checksum = calc_md5(fname)

    if checksum is None:
        print(f"Skipping {name} (no source file and no existing file to checksum)")
        return

    print(f"Compared checksums: Old {old_md5} New {checksum}")

    if checksum != old_md5:
        if fname is not None:
            print(f"Copying {fname} -> {target_name}")
            if not dryrun:
                shutil.copy(fname, target_name)
            elif rename:
                print(f"Dry run, not moving tempfile. Please delete {fname}.")
        else:
            print("No source file, not moving")

        if os.path.exists(target_name):
            update_db(conn, mod, fileId, version, name, checksum, dryrun)
            if not dryrun:
                try:
                    os.chmod(target_name, 0o664)
                except PermissionError:
                    print(f"Warning: Could not chmod {target_name}")
        else:
            print(f"Target file {target_name} does not exist, not updating db")
    else:
        print(f"New {name} file is identical to current version - skipping update")
        if rename:
            if not dryrun:
                os.unlink(fname)
            else:
                print(f"Dry run, not moving tempfile. Please delete {fname}.")


def do_files(conn, mod, version, files, target_dir, dryrun):
    """Process all files for given mod/version."""
    current_files = read_db(conn, mod)
    for name, fileId, source in files:
        old_md5 = current_files.get(fileId, {}).get("md5")
        create_file(conn, mod, fileId, version, name, source, target_dir, old_md5, dryrun)


def prepare_repo():
    """Clone or update the fa-coop repository and check out the requested ref."""
    repo_url = os.getenv("GIT_REPO_URL", "https://github.com/FAForever/fa-coop.git")
    patch_version = os.getenv("PATCH_VERSION")
    git_ref = os.getenv("GIT_REF") or (f"v{patch_version}" if patch_version else None)
    workdir = os.getenv("GIT_WORKDIR", "/tmp/fa-coop")

    if not git_ref:
        print("Error: GIT_REF or PATCH_VERSION must be specified.")
        sys.exit(1)

    print(f"=== Preparing repository {repo_url} at ref {git_ref} in {workdir} ===")

    # Clone if the working copy does not exist yet, otherwise fetch the latest changes and tags
    if not os.path.isdir(os.path.join(workdir, ".git")):
        print(f"Cloning repository into {workdir}...")
        subprocess.check_call(["git", "clone", repo_url, workdir])
    else:
        print(f"Repository already exists in {workdir}, fetching latest changes...")
        subprocess.check_call(["git", "-C", workdir, "fetch", "--all", "--tags"])

    # Checkout the desired ref
    print(f"Checking out {git_ref}...")
    subprocess.check_call(["git", "-C", workdir, "checkout", git_ref])

    print(f"=== Repository ready at {workdir} ===")
    return workdir


def download_vo_assets(version, target_dir):
    """
    Download VO .nx2 files from the latest GitHub release of fa-coop,
    rename them for the given patch version, and copy them to the target directory.
    """
    os.makedirs(target_dir, exist_ok=True)
    print(f"Fetching VO assets for patch version {version}...")

    # 1. Get latest release JSON from GitHub
    api_url = "https://api.github.com/repos/FAForever/fa-coop/releases/latest"
    with urllib.request.urlopen(api_url) as response:
        release_info = json.load(response)

    # 2. Filter assets ending with .nx2
    nx2_urls = [
        asset["browser_download_url"]
        for asset in release_info.get("assets", [])
        if asset["browser_download_url"].endswith(".nx2")
    ]

    if not nx2_urls:
        print("No VO .nx2 assets found in the latest release.")
        return

    temp_dir = os.path.join("/tmp", f"vo_download_{version}")
    os.makedirs(temp_dir, exist_ok=True)

    # 3. Download each .nx2 file
    for url in nx2_urls:
        filename = os.path.basename(url)
        dest_path = os.path.join(temp_dir, filename)
        print(f"Downloading {url} -> {dest_path}")
        urllib.request.urlretrieve(url, dest_path)

    # 4. Rename files to include the patch version (e.g., A01_VO.nx2 -> A01_VO.v49.nx2)
    for filepath in glob.glob(os.path.join(temp_dir, "*.nx2")):
        base = os.path.basename(filepath)
        # Insert .vXX. before the extension
        new_name = re.sub(r"\.nx2$", f".v{version}.nx2", base)
        new_path = os.path.join(temp_dir, new_name)
        os.rename(filepath, new_path)

    # 5. Copy to the target directory
    for filepath in glob.glob(os.path.join(temp_dir, "*.nx2")):
        target_path = os.path.join(target_dir, os.path.basename(filepath))
        print(f"Copying {filepath} -> {target_path}")
        shutil.copy(filepath, target_path)
        # Match the permissions used for the other featured-mod files
        os.chmod(target_path, 0o664)
        try:
            shutil.chown(target_path, group="www-data")
        except Exception:
            print(f"Warning: could not chown {target_path}, continuing...")

    print("VO assets processed successfully.")


def main():
    mod = "coop"
    dryrun = os.getenv("DRY_RUN", "false").lower() in ("1", "true", "yes")
    version = os.getenv("PATCH_VERSION")

    if version is None:
        print("Please pass the patch version in environment variable PATCH_VERSION")
        sys.exit(1)

    print(f"=== Starting mod updater for version {version}, dryrun={dryrun} ===")

    # create_file() appends updates_{mod}_files to this path
    target_dir = "/tmp/legacy-featured-mod-files"

    # Prepare git repo
    repo_dir = prepare_repo()

    # Download VO assets next to the other updates_{mod}_files
    vo_dir = os.path.join(target_dir, f"updates_{mod}_files")
    download_vo_assets(version, vo_dir)

    # Each entry: target filename (with version placeholder) / fileId in the updates_{mod}_files table / source
    # - if source is a single string, the file is copied directly
    # - if source is a list, the sources are zipped together
    # - if source is None, the existing file in the target directory is checksummed as-is
    files = [
        ('init_coop.v{}.lua', 1, os.path.join(repo_dir, 'init_coop.lua')),
        ('lobby_coop_v{}.cop', 2, [
            os.path.join(repo_dir, 'lua'),
            os.path.join(repo_dir, 'mods'),
            os.path.join(repo_dir, 'units'),
            os.path.join(repo_dir, 'mod_info.lua'),
            os.path.join(repo_dir, 'readme.md'),
            os.path.join(repo_dir, 'changelog.md'),
        ]),
        ('A01_VO.v{}.nx2', 3, None),
        ('A02_VO.v{}.nx2', 4, None),
        ('A03_VO.v{}.nx2', 5, None),
        ('A04_VO.v{}.nx2', 6, None),
        ('A05_VO.v{}.nx2', 7, None),
        ('A06_VO.v{}.nx2', 8, None),
        ('C01_VO.v{}.nx2', 9, None),
        ('C02_VO.v{}.nx2', 10, None),
        ('C03_VO.v{}.nx2', 11, None),
        ('C04_VO.v{}.nx2', 12, None),
        ('C05_VO.v{}.nx2', 13, None),
        ('C06_VO.v{}.nx2', 14, None),
        ('E01_VO.v{}.nx2', 15, None),
        ('E02_VO.v{}.nx2', 16, None),
        ('E03_VO.v{}.nx2', 17, None),
        ('E04_VO.v{}.nx2', 18, None),
        ('E05_VO.v{}.nx2', 19, None),
        ('E06_VO.v{}.nx2', 20, None),
        ('Prothyon16_VO.v{}.nx2', 21, None),
        ('TCR_VO.v{}.nx2', 22, None),
        ('SCCA_Briefings.v{}.nx2', 23, None),
        ('SCCA_FMV.nx2.v{}.nx2', 24, None),
        ('FAF_Coop_Operation_Tight_Spot_VO.v{}.nx2', 25, None),
    ]
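    # To ship a new mission's voice-over pack, append an entry with the next unused fileId,
    # for example (illustrative name, not an existing file):
    #   ('FAF_Coop_Some_New_Mission_VO.v{}.nx2', 26, None),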

    conn = get_db_connection()
    try:
        do_files(conn, mod, version, files, target_dir, dryrun)
    finally:
        conn.close()


if __name__ == "__main__":
    main()