 #!/usr/bin/env python3
+from __future__ import annotations
 
 import glob
 import os
 import platform
 import re
 import shutil
 import subprocess
 import sys
 import zipfile
+from pathlib import Path
 from typing import Any, Dict, Iterable, Iterator, List, Set, Tuple, Union
-
-try:
-    from urllib import urlretrieve  # type: ignore
-except ImportError:
-    from urllib.request import urlretrieve
+from urllib.request import urlretrieve
 
 from cffi import FFI  # type: ignore
 
-sys.path.append(os.path.dirname(__file__))
+sys.path.append(str(Path(__file__).parent))  # Allow importing local modules.
 
 import parse_sdl2  # noqa: E402
 
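Note on the new `from __future__ import annotations` import: it turns on postponed evaluation of annotations (PEP 563), which is what lets the class below annotate `all_headers: Dict[Path, ParsedHeader]` inside its own body without quoting the class name. A minimal sketch of that effect, using a hypothetical `Node` class:

from __future__ import annotations

from typing import Dict


class Node:
    # Without the __future__ import this annotation would raise NameError,
    # because Node is not bound yet while its own class body is executing.
    registry: Dict[str, Node] = {}
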
@@ -52,18 +50,18 @@ class ParsedHeader:
     """
 
     # Class dictionary of all parsed headers.
-    all_headers = {}  # type: Dict[str, "ParsedHeader"]
+    all_headers: Dict[Path, ParsedHeader] = {}
 
-    def __init__(self, path: str) -> None:
-        self.path = path = os.path.normpath(path)
-        directory = os.path.dirname(path)
+    def __init__(self, path: Path) -> None:
+        self.path = path = path.resolve(True)
+        directory = path.parent
         depends = set()
         with open(self.path, "r", encoding="utf-8") as f:
             header = f.read()
         header = RE_COMMENT.sub("", header)
         header = RE_CPLUSPLUS.sub("", header)
         for dependency in RE_INCLUDE.findall(header):
-            depends.add(os.path.normpath(os.path.join(directory, dependency)))
+            depends.add((directory / dependency).resolve(True))
         header = RE_PREPROCESSOR.sub("", header)
         header = RE_TAGS.sub("", header)
         header = RE_VAFUNC.sub("", header)
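`path.resolve(True)` passes `strict=True` positionally, so building a `ParsedHeader` now touches the filesystem and raises `FileNotFoundError` for a missing header or `#include` target, whereas the old `os.path.normpath` calls were purely textual. A small illustration with a hypothetical path:

from pathlib import Path

missing = Path("dependencies/does-not-exist.h")  # Hypothetical path.
print(missing.resolve())  # Non-strict resolve only makes the path absolute.
try:
    missing.resolve(True)  # Strict resolve also requires the file to exist.
except FileNotFoundError as exc:
    print("strict resolve failed:", exc)
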
@@ -87,25 +85,25 @@ def __str__(self) -> str:
         )
 
     def __repr__(self) -> str:
-        return "ParsedHeader(%s)" % (self.path,)
+        return f"ParsedHeader({self.path})"
 
 
 def walk_includes(directory: str) -> Iterator[ParsedHeader]:
     """Parse all the include files in a directory and subdirectories."""
-    for path, dirs, files in os.walk(directory):
+    for path, _dirs, files in os.walk(directory):
         for file in files:
             if file in HEADER_PARSE_EXCLUDES:
                 continue
             if file.endswith(".h"):
-                yield ParsedHeader(os.path.join(path, file))
+                yield ParsedHeader(Path(path, file).resolve(True))
 
 
 def resolve_dependencies(
     includes: Iterable[ParsedHeader],
 ) -> List[ParsedHeader]:
     """Sort headers by their correct include order."""
     unresolved = set(includes)
-    resolved = set()  # type: Set[ParsedHeader]
+    resolved: Set[ParsedHeader] = set()
     result = []
     while unresolved:
         for item in unresolved:
@@ -115,7 +113,7 @@ def resolve_dependencies(
         if not unresolved & resolved:
             raise RuntimeError(
                 "Could not resolve header load order.\n"
-                "Possible cyclic dependency with the unresolved headers:\n%s" % (unresolved,)
+                f"Possible cyclic dependency with the unresolved headers:\n{unresolved}"
             )
         unresolved -= resolved
     return result
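The loop around this check (its body falls between hunks) follows a resolve-then-subtract pattern: each pass moves headers whose dependencies are already resolved, and a pass that resolves nothing signals a cycle. A rough standalone sketch of that pattern with hypothetical dependency data, not the exact body of `resolve_dependencies`:

deps = {"SDL.h": {"SDL_stdinc.h"}, "SDL_stdinc.h": set()}
unresolved = set(deps)
resolved: set = set()
order = []
while unresolved:
    for name in unresolved:
        if deps[name] <= resolved:  # Every dependency is already placed.
            order.append(name)
            resolved.add(name)
    if not unresolved & resolved:
        raise RuntimeError("Possible cyclic dependency")
    unresolved -= resolved
print(order)  # ['SDL_stdinc.h', 'SDL.h']
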
@@ -124,49 +122,50 @@ def resolve_dependencies(
 def parse_includes() -> List[ParsedHeader]:
     """Collect all parsed header files and return them.
 
-    Reads HEADER_PARSE_PATHS and HEADER_PARSE_EXCLUDES."""
-    includes = []  # type: List[ParsedHeader]
+    Reads HEADER_PARSE_PATHS and HEADER_PARSE_EXCLUDES.
+    """
+    includes: List[ParsedHeader] = []
     for dirpath in HEADER_PARSE_PATHS:
         includes.extend(walk_includes(dirpath))
     return resolve_dependencies(includes)
 
 
 def walk_sources(directory: str) -> Iterator[str]:
-    for path, dirs, files in os.walk(directory):
+    for path, _dirs, files in os.walk(directory):
         for source in files:
             if source.endswith(".c"):
-                yield os.path.join(path, source)
+                yield str(Path(path, source))
 
 
-def get_sdl2_file(version: str) -> str:
+def get_sdl2_file(version: str) -> Path:
     if sys.platform == "win32":
-        sdl2_file = "SDL2-devel-%s-VC.zip" % (version,)
+        sdl2_file = f"SDL2-devel-{version}-VC.zip"
     else:
         assert sys.platform == "darwin"
-        sdl2_file = "SDL2-%s.dmg" % (version,)
-    sdl2_local_file = os.path.join("dependencies", sdl2_file)
-    sdl2_remote_file = "https://www.libsdl.org/release/%s" % sdl2_file
-    if not os.path.exists(sdl2_local_file):
-        print("Downloading %s" % sdl2_remote_file)
+        sdl2_file = f"SDL2-{version}.dmg"
+    sdl2_local_file = Path("dependencies", sdl2_file)
+    sdl2_remote_file = f"https://www.libsdl.org/release/{sdl2_file}"
+    if not sdl2_local_file.exists():
+        print(f"Downloading {sdl2_remote_file}")
         os.makedirs("dependencies/", exist_ok=True)
         urlretrieve(sdl2_remote_file, sdl2_local_file)
     return sdl2_local_file
 
 
-def unpack_sdl2(version: str) -> str:
-    sdl2_path = "dependencies/SDL2-%s" % (version,)
+def unpack_sdl2(version: str) -> Path:
+    sdl2_path = Path(f"dependencies/SDL2-{version}")
     if sys.platform == "darwin":
         sdl2_dir = sdl2_path
-        sdl2_path += "/SDL2.framework"
-    if os.path.exists(sdl2_path):
+        sdl2_path /= "SDL2.framework"
+    if sdl2_path.exists():
         return sdl2_path
     sdl2_arc = get_sdl2_file(version)
-    print("Extracting %s" % sdl2_arc)
-    if sdl2_arc.endswith(".zip"):
+    print(f"Extracting {sdl2_arc}")
+    if sdl2_arc.suffix == ".zip":
         with zipfile.ZipFile(sdl2_arc) as zf:
             zf.extractall("dependencies/")
     elif sys.platform == "darwin":
-        assert sdl2_arc.endswith(".dmg")
+        assert sdl2_arc.suffix == ".dmg"
         subprocess.check_call(["hdiutil", "mount", sdl2_arc])
         subprocess.check_call(["mkdir", "-p", sdl2_dir])
         subprocess.check_call(["cp", "-r", "/Volumes/SDL2/SDL2.framework", sdl2_dir])
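With `Path` values the archive handling can lean on pathlib operators: `/=` appends a component in place and `.suffix` replaces the `str.endswith` checks. For example (the version number is only illustrative):

from pathlib import Path

archive = Path("dependencies/SDL2-2.0.10.dmg")  # Hypothetical version.
print(archive.suffix)  # .dmg
bundle = Path("dependencies/SDL2-2.0.10")
bundle /= "SDL2.framework"  # Same as: bundle = bundle / "SDL2.framework"
print(bundle)  # dependencies/SDL2-2.0.10/SDL2.framework (POSIX form)
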
@@ -187,11 +186,11 @@ def unpack_sdl2(version: str) -> str:
 extra_parse_args = []
 extra_compile_args = []
 extra_link_args = []
-sources = []  # type: List[str]
+sources: List[str] = []
 
 libraries = []
 library_dirs: List[str] = []
-define_macros = [("Py_LIMITED_API", 0x03060000)]  # type: List[Tuple[str, Any]]
+define_macros: List[Tuple[str, Any]] = [("Py_LIMITED_API", 0x03060000)]
 
 sources += walk_sources("tcod/")
 sources += walk_sources("libtcod/src/libtcod/")
@@ -219,9 +218,9 @@ def unpack_sdl2(version: str) -> str:
     include_dirs.append("libtcod/src/zlib/")
 
 if sys.platform == "win32":
-    SDL2_INCLUDE = os.path.join(SDL2_PARSE_PATH, "include")
+    SDL2_INCLUDE = Path(SDL2_PARSE_PATH, "include")
 elif sys.platform == "darwin":
-    SDL2_INCLUDE = os.path.join(SDL2_PARSE_PATH, "Versions/A/Headers")
+    SDL2_INCLUDE = Path(SDL2_PARSE_PATH, "Versions/A/Headers")
 else:
     matches = re.findall(
         r"-I(\S+)",
@@ -231,43 +230,40 @@ def unpack_sdl2(version: str) -> str:
 
     SDL2_INCLUDE = None
     for match in matches:
-        if os.path.isfile(os.path.join(match, "SDL_stdinc.h")):
+        if Path(match, "SDL_stdinc.h").is_file():
             SDL2_INCLUDE = match
     assert SDL2_INCLUDE
 
 if sys.platform == "win32":
-    include_dirs.append(SDL2_INCLUDE)
+    include_dirs.append(str(SDL2_INCLUDE))
     ARCH_MAPPING = {"32bit": "x86", "64bit": "x64"}
-    SDL2_LIB_DIR = os.path.join(SDL2_BUNDLE_PATH, "lib/", ARCH_MAPPING[BITSIZE])
-    library_dirs.append(SDL2_LIB_DIR)
-    SDL2_LIB_DEST = os.path.join("tcod", ARCH_MAPPING[BITSIZE])
-    if not os.path.exists(SDL2_LIB_DEST):
+    SDL2_LIB_DIR = Path(SDL2_BUNDLE_PATH, "lib/", ARCH_MAPPING[BITSIZE])
+    library_dirs.append(str(SDL2_LIB_DIR))
+    SDL2_LIB_DEST = Path("tcod", ARCH_MAPPING[BITSIZE])
+    if not SDL2_LIB_DEST.exists():
         os.mkdir(SDL2_LIB_DEST)
-        shutil.copy(os.path.join(SDL2_LIB_DIR, "SDL2.dll"), SDL2_LIB_DEST)
+        shutil.copy(Path(SDL2_LIB_DIR, "SDL2.dll"), SDL2_LIB_DEST)
 
 
-def fix_header(filepath: str) -> None:
+def fix_header(path: Path) -> None:
     """Removes leading whitespace from a MacOS header file.
 
     This whitespace is causing issues with directives on some platforms.
     """
-    with open(filepath, "r+", encoding="utf-8") as f:
-        current = f.read()
-        fixed = "\n".join(line.strip() for line in current.split("\n"))
-        if current == fixed:
-            return
-        f.seek(0)
-        f.truncate()
-        f.write(fixed)
+    current = path.read_text(encoding="utf-8")
+    fixed = "\n".join(line.strip() for line in current.split("\n"))
+    if current == fixed:
+        return
+    path.write_text(fixed, encoding="utf-8")
 
 
 if sys.platform == "darwin":
-    HEADER_DIR = os.path.join(SDL2_PARSE_PATH, "Headers")
-    fix_header(os.path.join(HEADER_DIR, "SDL_assert.h"))
-    fix_header(os.path.join(HEADER_DIR, "SDL_config_macosx.h"))
+    HEADER_DIR = Path(SDL2_PARSE_PATH, "Headers")
+    fix_header(Path(HEADER_DIR, "SDL_assert.h"))
+    fix_header(Path(HEADER_DIR, "SDL_config_macosx.h"))
     include_dirs.append(HEADER_DIR)
-    extra_link_args += ["-F%s/.." % SDL2_BUNDLE_PATH]
-    extra_link_args += ["-rpath", "%s/.." % SDL2_BUNDLE_PATH]
+    extra_link_args += [f"-F{SDL2_BUNDLE_PATH}/.."]
+    extra_link_args += ["-rpath", f"{SDL2_BUNDLE_PATH}/.."]
     extra_link_args += ["-rpath", "/usr/local/opt/llvm/lib/"]
 
     # Fix "implicit declaration of function 'close'" in zlib.
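`fix_header` now uses `Path.read_text` and `Path.write_text`, which open, transfer, and close the file in single calls, instead of seeking and truncating one `r+` handle. A small equivalent on a hypothetical header file:

from pathlib import Path

header = Path("example_header.h")  # Hypothetical file in the working directory.
header.write_text("  #pragma once\n", encoding="utf-8")
text = header.read_text(encoding="utf-8")
stripped = "\n".join(line.strip() for line in text.split("\n"))
if text != stripped:
    header.write_text(stripped, encoding="utf-8")
print(header.read_text(encoding="utf-8"))  # prints: #pragma once
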
@@ -309,7 +305,7 @@ def fix_header(filepath: str) -> None:
         ffi.cdef(include.header)
     except Exception:
         # Print the source, for debugging.
-        print("Error with: %s" % include.path)
+        print(f"Error with: {include.path}")
         for i, line in enumerate(include.header.split("\n"), 1):
             print("%03i %s" % (i, line))
         raise
@@ -374,8 +370,8 @@ def parse_sdl_attrs(prefix: str, all_names: List[str]) -> Tuple[str, str]:
     lookup = []
     for name, value in sorted(find_sdl_attrs(prefix), key=lambda item: item[1]):
         all_names.append(name)
-        names.append("%s = %s" % (name, value))
-        lookup.append('%s: "%s"' % (value, name))
+        names.append(f"{name} = {value}")
+        lookup.append(f'{value}: "{name}"')
     return "\n".join(names), "{\n%s,\n}" % (",\n".join(lookup),)
 
 
@@ -408,10 +404,10 @@ def update_module_all(filename: str, new_all: str) -> None:
     )
     with open(filename, "r", encoding="utf-8") as f:
         match = RE_CONSTANTS_ALL.match(f.read())
-    assert match, "Can't determine __all__ subsection in %s!" % (filename,)
+    assert match, f"Can't determine __all__ subsection in {filename}!"
     header, footer = match.groups()
     with open(filename, "w", encoding="utf-8") as f:
-        f.write("%s\n%s,\n%s" % (header, new_all, footer))
+        f.write(f"{header}\n{new_all},\n{footer}")
 
 
 def generate_enums(prefix: str) -> Iterator[str]:
@@ -446,14 +442,14 @@ def write_library_constants() -> None:
             value = getattr(lib, name)
             if name[:5] == "TCOD_":
                 if name.isupper():  # const names
-                    f.write("%s = %r\n" % (name[5:], value))
+                    f.write(f"{name[5:]} = {value!r}\n")
                     all_names.append(name[5:])
             elif name.startswith("FOV"):  # fov const names
-                f.write("%s = %r\n" % (name, value))
+                f.write(f"{name} = {value!r}\n")
                 all_names.append(name)
             elif name[:6] == "TCODK_":  # key name
-                f.write("KEY_%s = %r\n" % (name[6:], value))
-                all_names.append("KEY_%s" % name[6:])
+                f.write(f"KEY_{name[6:]} = {value!r}\n")
+                all_names.append(f"KEY_{name[6:]}")
 
         f.write("\n# --- colors ---\n")
         for name in dir(lib):
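The `!r` conversions in the f-strings keep the `repr()` formatting that the old `%r` placeholders produced, so the generated constants keep the same layout. A quick check with a hypothetical name and value:

name = "TCOD_EXAMPLE"  # Hypothetical constant name.
value = 42
assert f"{name[5:]} = {value!r}\n" == "%s = %r\n" % (name[5:], value)
print(f"{name[5:]} = {value!r}")  # EXAMPLE = 42
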
@@ -465,11 +461,11 @@ def write_library_constants() -> None:
             if ffi.typeof(value) != ffi.typeof("TCOD_color_t"):
                 continue
             color = tcod.color.Color._new_from_cdata(value)
-            f.write("%s = %r\n" % (name[5:], color))
+            f.write(f"{name[5:]} = {color!r}\n")
             all_names.append(name[5:])
 
-        all_names_merged = ",\n".join('"%s"' % name for name in all_names)
-        f.write("\n__all__ = [\n%s,\n]\n" % (all_names_merged,))
+        all_names_merged = ",\n".join(f'"{name}"' for name in all_names)
+        f.write(f"\n__all__ = [\n{all_names_merged},\n]\n")
         update_module_all("tcod/__init__.py", all_names_merged)
         update_module_all("tcod/libtcodpy.py", all_names_merged)
 
@@ -487,11 +483,10 @@ def write_library_constants() -> None:
 
         f.write("\n# --- SDL wheel ---\n")
         f.write("%s\n_REVERSE_WHEEL_TABLE = %s\n" % parse_sdl_attrs("SDL_MOUSEWHEEL", all_names))
-        all_names_merged = ",\n".join('"%s"' % name for name in all_names)
-        f.write("\n__all__ = [\n%s,\n]\n" % (all_names_merged,))
+        all_names_merged = ",\n".join(f'"{name}"' for name in all_names)
+        f.write(f"\n__all__ = [\n{all_names_merged},\n]\n")
 
-    with open("tcod/event.py", "r", encoding="utf-8") as f:
-        event_py = f.read()
+    event_py = Path("tcod/event.py").read_text(encoding="utf-8")
 
     event_py = re.sub(
         r"(?<=# --- SDL scancodes ---\n).*?(?=\n# --- end ---\n)",
@@ -506,8 +501,7 @@ def write_library_constants() -> None:
         flags=re.DOTALL,
     )
 
-    with open("tcod/event.py", "w", encoding="utf-8") as f:
-        f.write(event_py)
+    Path("tcod/event.py").write_text(event_py, encoding="utf-8")
 
 
 if __name__ == "__main__":