2 Commits dd04e1ca1e ... ddb25dab2d

Author SHA1 Message Date
  somewhatlurker ddb25dab2d amd-tools: consolidate patching and farc (un)packing into AFT Shader Patcher, and switch to xdelta vcdiff generation for use with new Novidia 4 years ago
  somewhatlurker 9b1ab83b15 novidia: cleanup shader patching, fix crc formatting bug 4 years ago

+ 3 - 0
source-code/data/amd-tools/.gitignore

@@ -0,0 +1,3 @@
+venv/*
+*.farc
+*.vcdiff

source-code/data/amd-tools/ARB Patcher Source/gamesettings/divaaft.py → source-code/data/amd-tools/AFT Shader Patcher/ARB Patcher/gamesettings/divaaft.py


+ 74 - 0
source-code/data/amd-tools/AFT Shader Patcher/ARB Patcher/gamesettings/divaaft_noskinning.py

@@ -0,0 +1,74 @@
+# game-specific settings for diva arcade future tone
+GAME_NAME = "Project DIVA Arcade: Future Tone"
+
+from re import compile as recompile
+
+# some skinning can't be done due to using NV parameter buffers, so just disable it.
+VP_SKINNING_REGEX = recompile(r"(BUFFER4[\s\S]*?)MOV (.*?).w, 1; SUBC _tmp1, vertex.attrib\[15\], (-1|255); (.*?) IF NE.y; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].y, \2; (.*?) MAD (.*?).xyz, _tmp0, vertex.attrib\[1\].y, \7; (.*?) MAD (.*?).xyz, _tmp0, vertex.attrib\[1\].y, \9; IF NE.z; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].z, \2; (.*?) MAD \7.xyz, _tmp0, vertex.attrib\[1\].z, \7; (.*?) MAD \9.xyz, _tmp0, vertex.attrib\[1\].z, \9; IF NE.w; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].w, \2; (.*?) MAD \7.xyz, _tmp0, vertex.attrib\[1\].w, \7; (.*?) MAD \9.xyz, _tmp0, vertex.attrib\[1\].w, \9; (?:ENDIF; ENDIF; ENDIF;|.*:)")
+VP_SKINNING_SUB = "\\1MOV \\2.w, 1; MOV \\2.xyz, vertex.position; MOV \\7, vertex.normal; MOV \\9, vertex.attrib[6];"
+
+VP_SKINNING_REGEX_2 = recompile(r"(BUFFER4[\s\S]*?)MOV (.*?).w, 1; SUBC _tmp1, vertex.attrib\[15\], (-1|255); (.*?) IF NE.y; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].y, \2; (.*?) MAD (.*?).xyz, _tmp0, vertex.attrib\[1\].y, \7; IF NE.z; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].z, \2; (.*?) MAD \7.xyz, _tmp0, vertex.attrib\[1\].z, \7; IF NE.w; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].w, \2; (.*?) MAD \7.xyz, _tmp0, vertex.attrib\[1\].w, \7; (?:ENDIF; ENDIF; ENDIF;|.*:)")
+VP_SKINNING_SUB_2 = "\\1MOV \\2.w, 1; MOV \\2.xyz, vertex.position; MOV \\7, vertex.normal;"
+
+VP_SKINNING_REGEX_3 = recompile(r"(BUFFER4[\s\S]*?)MOV (.*?).w, 1; SUBC _tmp1, vertex.attrib\[15\], (-1|255); (.*?) IF NE.y; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].y, \2; IF NE.z; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].z, \2; IF NE.w; (.*?) MAD \2.xyz, _tmp0, vertex.attrib\[1\].w, \2; (?:ENDIF; ENDIF; ENDIF;|.*:)")
+VP_SKINNING_SUB_3 = "\\1MOV \\2.w, 1; MOV \\2.xyz, vertex.position;"
+
+
+# some kind of effect (scrunching up or stretching?) in tights shaders starting with 11 that partially uses skinning of normals...  let's just pretend it always results in 0
+TIGHTS_SKINNING_EFFECT_REGEX = recompile(r"(BUFFER4[\s\S]*?)SUBC .*?, vertex.attrib\[15\], .*?;\s*?SUBC1 tmp, vertex.attrib\[15\], .*?;[\s\S]*?XPD .*?, .*?, .*?;\s*?DP3_SAT (.*?), .*?, .*?;")
+TIGHTS_SKINNING_EFFECT_SUB = "\\1MOV \\2, 0;"
+
+
+# some skinning just needs some branching fixup for AMD BRA working differently (needs a target per branch instruction apparently)
+SKIN_BRA_REGEX = recompile(r"BRA skinning_(?:\S*)end (\(.*?\.y\))*?\s*?;")
+SKIN_BRA_SUB = "BRA sk_end1 \\1;"
+SKIN_BRA_REGEX_2 = recompile(r"BRA skinning_(?:\S*)end (\(.*?\.z\))*?\s*?;")
+SKIN_BRA_SUB_2 = "BRA sk_end2 \\1;"
+SKIN_BRA_REGEX_3 = recompile(r"BRA skinning_(?:\S*)end (\(.*?\.w\))*?\s*?;")
+SKIN_BRA_SUB_3 = "BRA sk_end3 \\1;"
+
+SKIN_BRA_TGT_REGEX = recompile(r"skinning_(?:\S*)end:")
+SKIN_BRA_TGT_SUB = "\nsk_end1:\nsk_end2:\nsk_end3:\n"
+
+
+MATCH_FP_FNAME = recompile(r".*fp")
+MATCH_VP_FNAME = recompile(r".*vp")
+MATCH_ALL_FNAME = recompile(r".*")
+MATCH_FONT_FNAME = recompile(r"font.*fp")
+MATCH_TIGHTS_FNAME = recompile(r"tights.*vp")
+
+# fourth param: apply continuously until no more matches are found
+fix_repls = [
+    (MATCH_VP_FNAME, VP_SKINNING_REGEX, VP_SKINNING_SUB, False),
+    (MATCH_VP_FNAME, VP_SKINNING_REGEX_2, VP_SKINNING_SUB_2, False),
+    (MATCH_VP_FNAME, VP_SKINNING_REGEX_3, VP_SKINNING_SUB_3, False),
+    (MATCH_TIGHTS_FNAME, TIGHTS_SKINNING_EFFECT_REGEX, TIGHTS_SKINNING_EFFECT_SUB, False),
+    (MATCH_VP_FNAME, SKIN_BRA_REGEX, SKIN_BRA_SUB, False),
+    (MATCH_VP_FNAME, SKIN_BRA_REGEX_2, SKIN_BRA_SUB_2, False),
+    (MATCH_VP_FNAME, SKIN_BRA_REGEX_3, SKIN_BRA_SUB_3, False),
+    (MATCH_VP_FNAME, SKIN_BRA_TGT_REGEX, SKIN_BRA_TGT_SUB, False),
+]
+
+
+# use to open a different file instead of the original shader (eg. to replace with an easier-to-patch variant)
+def filename_filter(fname):
+    if fname == 'sss_filter.130.fp':
+        return 'sss_filter.120.fp'
+    elif fname == 'sss_filter.131.fp':
+        return 'sss_filter.121.fp'
+    else:
+        return fname
+
+
+# use to tweak results after patching
+def post_filter(fname, f_full):
+    f_full = f_full.replace('\nBUFFER4', '\n#BUFFER4')
+    
+    # fix tex sampler offsets
+    # diva has something we can use for pixel size (program.local[0]), but it isn't in fonts so they get an approximation instead
+    if MATCH_FONT_FNAME.match(fname):
+        f_full = f_full.replace('{ 0.00078, 0.0014 }; ADD tex_offset_coord', '0.00035; ADD tex_offset_coord')
+    else:
+        f_full = f_full.replace('{ 0.00078, 0.0014 }; ADD tex_offset_coord', 'program.local[0]; ADD tex_offset_coord')
+        
+    return f_full

source-code/data/amd-tools/ARB Patcher Source/main.py → source-code/data/amd-tools/AFT Shader Patcher/ARB Patcher/main.py


source-code/data/amd-tools/ARB Patcher Source/rules.py → source-code/data/amd-tools/AFT Shader Patcher/ARB Patcher/rules.py


+ 117 - 0
source-code/data/amd-tools/AFT Shader Patcher/aft_shader_patcher.py

@@ -0,0 +1,117 @@
+import sys
+from os import chdir, makedirs, get_terminal_size
+from os.path import join as joinpath, splitext, isfile, exists, dirname, abspath
+import pyfarc
+import importlib, importlib.util
+
+if getattr(sys, 'frozen', False):
+    datadir = dirname(sys.executable)
+else:
+    datadir = dirname(__file__)
+
+arbpatcher_dir = joinpath(datadir, 'ARB Patcher')
+arbpatcher_gamesettings_dir = joinpath(arbpatcher_dir, 'gamesettings')
+
+sys.path = [arbpatcher_dir] + sys.path
+import main as ArbPatcher
+sys.path = sys.path[1:]
+
+game_settings_spec = importlib.util.spec_from_file_location('divaaft', joinpath(arbpatcher_gamesettings_dir, 'divaaft.py'))
+game_settings_module = importlib.util.module_from_spec(game_settings_spec)
+game_settings_spec.loader.exec_module(game_settings_module)
+
+
+# not bothering to check for main here because it's just a glue script anyway
+def get_args():    
+    import argparse
+    parser = argparse.ArgumentParser(description='AFT Shader Patcher: Patches Nvidia-only ARB shaders from PDAFT to work on AMD. Patching techniques from Nezarn; implementation by somewhatlurker.')
+    parser.add_argument('-i', '--in_farc', default='shader.farc', help='input shader farc file (default: "shader.farc")')
+    parser.add_argument('-o', '--out_farc', default='shader_patched.farc', help='output shader farc file (default: "shader_patched.farc")')
+    parser.add_argument('-c', '--compress', action='store_true', help='force use of farc compression (this may produce larger vcdiff files)')
+    parser.add_argument('-x', '--xdelta', action='store_true', help='generate a vcdiff patch file')
+    
+    return parser.parse_args()
+
+args = get_args()
+
+print("AFT Shader Patcher")
+print("==================")
+print("Patching techniques from Nezarn; implementation by somewhatlurker")
+print("=================================================================")
+print("Input file: '{}'".format(args.in_farc))
+print("Output file: '{}'".format(args.out_farc))
+
+if not exists(args.in_farc):
+    print ("'{}' does not exist. Aborting".format(args.in_farc))
+    sys.exit()
+elif not isfile(args.in_farc):
+    print ("'{}' is not a file. Aborting".format(args.in_farc))
+    sys.exit()
+
+if exists(args.out_farc):
+    if not isfile(args.out_farc):
+        print ("'{}' already exists but is a directory. Aborting".format(args.out_farc))
+        sys.exit()
+
+
+with open(args.in_farc, 'rb') as f:
+    farcdata = pyfarc.from_stream(f)
+    if args.xdelta:
+        f.seek(0)
+        from binascii import crc32
+        in_farc_crc_str = '{:08x}'.format(crc32(f.read()))
+
+proc_count = 0
+    
+last_status_len = 0
+
+for fname in farcdata['files']:
+    proc_count += 1
+    progress_val = proc_count / len(farcdata['files'])
+    progress_cnt_X = int(progress_val * 20)
+    
+    status_str = '\r[{e:{s1}<{n1}}{e:{s2}<{n2}}]'.format(e='', s1='X', s2='-', n1=progress_cnt_X, n2=20-progress_cnt_X)
+    status_str += ' {:.2%}'.format(progress_val)
+    status_str += '   ' + fname
+    
+    try:
+        terminal_width = get_terminal_size()[0]
+    except:
+        terminal_width = 120
+    
+    if len(status_str) > terminal_width:
+        status_str = status_str[:terminal_width-3] + '...'
+    
+    # fix characters left on screen from a previous longer line
+    # (without cursor staying off to the side)
+    this_status_len = len(status_str)
+    if this_status_len < last_status_len:
+        status_str = '{: <{l}}'.format(status_str, l=last_status_len)
+    last_status_len = this_status_len
+    
+    print (status_str, end='')
+    
+    if game_settings_module and game_settings_module.filename_filter:
+        openname = game_settings_module.filename_filter(fname)
+    else:
+        openname = fname
+    
+    f_lines = farcdata['files'][openname]['data'].decode('utf-8').splitlines(keepends=True)
+    
+    f_full = ArbPatcher.patch_shader(fname, f_lines, game_settings_module, True)
+    
+    farcdata['files'][fname]['data'] = f_full.encode('utf-8')
+
+if args.compress:
+    farcdata['farc_type'] = 'FArC'
+
+with open(args.out_farc, 'wb') as f:
+    pyfarc.to_stream(farcdata, f, no_copy=True)
+
+if args.xdelta:
+    import subprocess
+    
+    # make paths absolute before running this
+    #args.in_farc = abspath(args.in_farc)
+    #args.out_farc = abspath(args.out_farc)
+    subprocess.run([joinpath(datadir, 'xdelta3.exe'), '-e', '-f', '-S', 'none', '-s', args.in_farc, args.out_farc, joinpath(dirname(args.out_farc), in_farc_crc_str + '.vcdiff')], )

+ 252 - 0
source-code/data/amd-tools/AFT Shader Patcher/pyfarc.py

@@ -0,0 +1,252 @@
+"""
+pyfarc reader and writer for farc archives
+supports Farc and FarC only
+"""
+
+from construct import Struct, Const, Int32ub, Int32sb, RepeatUntil, CString, Pointer, Bytes, Padding
+from copy import deepcopy
+import gzip
+
+_FArc_format = Struct(
+    "signature" / Const(b'FArc'),
+    "header_size" / Int32ub, # doesn't include signature or header_size
+    "alignment" / Int32sb,
+    "files" / RepeatUntil(lambda obj,lst,ctx: ctx._io.tell() - 7 > ctx.header_size, Struct(
+        "name" / CString("utf8"),
+        "pointer" / Int32ub,
+        "size" / Int32ub,
+        "data" / Pointer(lambda this: this.pointer, Bytes(lambda this: this.size))
+    )),
+    #Padding(lambda this: this.alignment - (this._io.tell() % this.alignment) if this._io.tell() % this.alignment else 0)
+)
+
+_FArC_format = Struct(
+    "signature" / Const(b'FArC'),
+    "header_size" / Int32ub, # doesn't include signature or header_size
+    "alignment" / Int32sb,
+    "files" / RepeatUntil(lambda obj,lst,ctx: ctx._io.tell() - 7 > ctx.header_size, Struct(
+        "name" / CString("utf8"),
+        "pointer" / Int32ub,
+        "compressed_size" / Int32ub,
+        "uncompressed_size" / Int32ub,
+        "data" / Pointer(lambda this: this.pointer, Bytes(lambda this: this.compressed_size))
+    )),
+    #Padding(lambda this: this.alignment - (this._io.tell() % this.alignment) if this._io.tell() % this.alignment else 0)
+)
+
+_farc_types = {
+    'FArc': {
+        'remarks': 'basic farc format',
+        'struct': _FArc_format,
+        'compression_support': False,
+        'compression_forced': False,
+        'fixed_header_size': 4,
+        'files_header_fields_size': 8,
+    },
+    'FArC': {
+        'remarks': 'farc with compression support',
+        'struct': _FArC_format,
+        'compression_support': True,
+        'compression_forced': True,
+        'fixed_header_size': 4,
+        'files_header_fields_size': 12,
+    },
+}
+
+class UnsupportedFarcTypeException(Exception):
+    pass
+
+def check_farc_type(t):
+    """Checks if a farc type is supported and returns a remarks string. Raises UnsupportedFarcTypeException if not supported."""
+    
+    if not t in _farc_types:
+        raise UnsupportedFarcTypeException("{} type not supported".format(t))
+    
+    return _farc_types[t]['remarks']
+
+
+def _files_header_size_calc(files, farc_type):
+    """Sums the size of the files header section for the given files and farc_type data."""
+    
+    size = 0
+    for fname, info in files.items():
+        size += len(fname) + 1
+        size += farc_type['files_header_fields_size']
+    return size
+
+def _prep_files(files, alignment, farc_type):
+    """Gets files ready for writing by compressing them and calculating pointers."""
+    
+    def _compress_files(files, farc_type):
+        for fname, info in files.items():
+            info['data_compressed'] = gzip.compress(info['data'], mtime=39) # set mtime for reproducible output
+            if (not farc_type['compression_forced']) and (len(info['data_compressed']) >= len(info['data'])):
+                info['data_compressed'] = info['data']
+       
+    def _set_files_pointers(files, alignment, farc_type):
+        pos = 8 + farc_type['fixed_header_size'] + _files_header_size_calc(files, farc_type)
+        
+        for fname, info in files.items():
+            if pos % alignment: pos += alignment - (pos % alignment)
+            info['pointer'] = pos
+            if 'data_compressed' in info:
+                pos += len(info['data_compressed'])
+            else:
+                pos += len(info['data'])
+    
+    if farc_type['compression_support']:
+        _compress_files(files, farc_type)
+    _set_files_pointers(files, alignment, farc_type)
+
+def to_bytes(data, alignment=1, no_copy=False):
+    """
+    Converts a farc dictionary (formatted like the dictionary returned by from_bytes) to an in-memory bytes object containing farc data.
+    
+    Set no_copy to True for a speedup and memory usage reduction if you don't mind your input data being contaminated.
+    """
+    
+    magic_str = data['farc_type']
+    check_farc_type(magic_str)
+    farc_type = _farc_types[magic_str]
+    
+    if no_copy:
+        files = data['files']
+    else:
+        files = deepcopy(data['files'])
+    _prep_files(files, alignment, farc_type)
+    
+    if farc_type['compression_support']:
+        return farc_type['struct'].build(dict(
+            header_size=farc_type['fixed_header_size'] + _files_header_size_calc(files, farc_type),
+            alignment=alignment,
+            files=[dict(
+                name=fname,
+                pointer=info['pointer'],
+                compressed_size=len(info['data_compressed']),
+                uncompressed_size=len(info['data']),
+                data=info['data_compressed']
+            ) for fname, info in files.items()]
+        ))
+    else:
+        return farc_type['struct'].build(dict(
+            header_size=farc_type['fixed_header_size'] + _files_header_size_calc(files, farc_type),
+            alignment=alignment,
+            files=[dict(
+                name=fname,
+                pointer=info['pointer'],
+                size=len(info['data']),
+                data=info['data']
+            ) for fname, info in files.items()]
+        ))
+
+def to_stream(data, stream, alignment=1, no_copy=False):
+    """
+    Converts a farc dictionary (formatted like the dictionary returned by from_stream) to farc data and writes it to a stream.
+    
+    Set no_copy to True for a speedup and memory usage reduction if you don't mind your input data being contaminated.
+    """
+    
+    magic_str = data['farc_type']
+    check_farc_type(magic_str)
+    farc_type = _farc_types[magic_str]
+    
+    if no_copy:
+        files = data['files']
+    else:
+        files = deepcopy(data['files'])
+    _prep_files(files, alignment, farc_type)
+    
+    if farc_type['compression_support']:
+        return farc_type['struct'].build_stream(dict(
+            header_size=farc_type['fixed_header_size'] + _files_header_size_calc(files, farc_type),
+            alignment=alignment,
+            files=[dict(
+                name=fname,
+                pointer=info['pointer'],
+                compressed_size=len(info['data_compressed']),
+                uncompressed_size=len(info['data']),
+                data=info['data_compressed']
+            ) for fname, info in files.items()]
+        ), stream)
+    else:
+        return farc_type['struct'].build_stream(dict(
+            header_size=farc_type['fixed_header_size'] + _files_header_size_calc(files, farc_type),
+            alignment=alignment,
+            files=[dict(
+                name=fname,
+                pointer=info['pointer'],
+                size=len(info['data']),
+                data=info['data']
+            ) for fname, info in files.items()]
+        ), stream)
+
+
+def _parsed_to_dict(farcdata, farc_type):
+    """Converts the raw construct data to our standard dictionary format."""
+    
+    files = {}
+    
+    if farc_type['compression_support']:
+        for f in farcdata['files']:
+            if farc_type['compression_forced'] or (f['uncompressed_size'] != f['compressed_size']):
+                data = gzip.decompress(f['data'])
+            else:
+                data = f['data']
+            files[f['name']] = {'data': data}
+    else:
+        for f in farcdata['files']:
+            data = f['data']
+            files[f['name']] = {'data': data}
+    
+    return {'farc_type': farcdata['signature'].decode('ascii'), 'files': files}
+
+def from_bytes(b):
+    """Converts farc data from bytes to a dictionary."""
+    
+    magic_str = b[:4].decode('ascii')
+    check_farc_type(magic_str)
+    farc_type = _farc_types[magic_str]
+    
+    farcdata = farc_type['struct'].parse(b)
+    return _parsed_to_dict(farcdata, farc_type)
+
+def from_stream(s):
+    """Converts farc data from a stream to a dictionary."""
+    
+    pos = s.tell()
+    magic_str = s.read(4).decode('ascii')
+    check_farc_type(magic_str)
+    farc_type = _farc_types[magic_str]
+    s.seek(pos)
+    
+    farcdata = farc_type['struct'].parse_stream(s)
+    return _parsed_to_dict(farcdata, farc_type)
+
+
+#test_farc = {'farc_type': 'FArc', 'files': {'aaa': {'data': b'test1'}, 'bbb': {'data': b'test2'}, 'ccc': {'data': b'aaaaaaaaaaaaaaaaaaaaaaaa'}}}
+test_farc = {'farc_type': 'FArC', 'files': {'aaa': {'data': b'test1'}, 'bbb': {'data': b'test2'}, 'ccc': {'data': b'aaaaaaaaaaaaaaaaaaaaaaaa'}}}
+#print (test_farc)
+
+#test_bytes = to_bytes(test_farc, alignment=16)
+#print (test_bytes)
+#print (from_bytes(test_bytes))
+
+#with open('test.farc', 'wb') as f:
+#    to_stream(test_farc, f, alignment=16)
+#with open('test.farc', 'rb') as f:
+#    print (from_stream(f))
+
+#with open('shader_amd.farc', 'rb') as f:
+#    shaderfarc = from_stream(f)
+#with open('shader_amd_out.farc', 'wb') as f:
+#    to_stream(shaderfarc, f, alignment=16, no_copy=True)
+
+#with open('shader_amd_compressed.farc', 'rb') as f:
+#    shaderfarc = from_stream(f)
+#with open('shader_amd_out_compressed.farc', 'wb') as f:
+#    to_stream(shaderfarc, f, alignment=1, no_copy=True)
+
+#with open('fontmap.farc', 'rb') as f:
+#    fontmapfarc = from_stream(f)
+#with open('fontmap_out.farc', 'wb') as f:
+#    to_stream(fontmapfarc, f, alignment=1, no_copy=True)

BIN
source-code/data/amd-tools/AFT Shader Patcher/xdelta3.exe


BIN
source-code/data/amd-tools/FarcPack.exe


+ 0 - 0
source-code/data/amd-tools/MikuMikuLibrary.dll


Some files were not shown because too many files changed in this diff