 53435579b3
			
		
	
	
		53435579b3
		
			
		
	
	
	
	
		
			
			* fbt, faploader: minimal app module implementation * faploader, libs: moved API hashtable core to flipper_application * example: compound api * lib: flipper_application: naming fixes, doxygen comments * fbt: changed `requires` manifest field behavior for app extensions * examples: refactored plugin apps; faploader: changed new API naming; fbt: changed PLUGIN app type meaning * loader: dropped support for debug apps & plugin menus * moved applications/plugins -> applications/external * Restored x bit on chiplist_convert.py * git: fixed free-dap submodule path * pvs: updated submodule paths * examples: example_advanced_plugins.c: removed potential memory leak on errors * examples: example_plugins: refined requires * fbt: not deploying app modules for debug/sample apps; extra validation for .PLUGIN-type apps * apps: removed cdefines for external apps * fbt: moved ext app path definition * fbt: reworked fap_dist handling; f18: synced api_symbols.csv * fbt: removed resources_paths for extapps * scripts: reworked storage * scripts: reworked runfap.py & selfupdate.py to use new api * wip: fal runner * fbt: moved file packaging into separate module * scripts: storage: fixes * scripts: storage: minor fixes for new api * fbt: changed internal artifact storage details for external apps * scripts: storage: additional fixes and better error reporting; examples: using APP_DATA_PATH() * fbt, scripts: reworked launch_app to deploy plugins; moved old runfap.py to distfap.py * fbt: extra check for plugins descriptors * fbt: additional checks in emitter * fbt: better info message on SDK rebuild * scripts: removed requirements.txt * loader: removed remnants of plugins & debug menus * post-review fixes
		
			
				
	
	
		
			109 lines
		
	
	
		
			3.3 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			109 lines
		
	
	
		
			3.3 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
| import os
 | |
| import hashlib
 | |
| import struct
 | |
| from typing import TypedDict
 | |
| 
 | |
| 
 | |
class File(TypedDict):
    """One file record collected by FileBundler._gather()."""

    # path: bundle-relative path (as written into the archive)
    path: str
    # size: byte size recorded at scan time
    size: int
    # content_path: on-disk path used to read the file's contents at export
    content_path: str
 | |
| 
 | |
| 
 | |
class Dir(TypedDict):
    """One directory record collected by FileBundler._gather()."""

    # path: bundle-relative directory path (as written into the archive)
    path: str
 | |
| 
 | |
| 
 | |
class FileBundler:
    """Packs a directory tree into a single binary bundle file.

    Bundle layout (all integers are little-endian u32):

        u32  magic            (0x4F4C5A44)
        u32  version          (currently 1)
        u32  dirs_count
        u32  files_count
        u32  signature_size   (length of the MD5 digest, 16)
        u8[] signature        (MD5 over names + contents, patched in last)
        Dirs (dirs_count records):
            u32  dir_name length  (including trailing NUL)
            u8[] dir_name         (NUL-terminated)
        Files (files_count records):
            u32  file_name length (including trailing NUL)
            u8[] file_name        (NUL-terminated)
            u32  file_content_size
            u8[] file_content

    NOTE(review): names are encoded as ASCII, so non-ASCII paths raise
    UnicodeEncodeError — presumably intentional for the target device;
    confirm against the consumer of this format.
    """

    def __init__(self, directory_path: str):
        """Scan *directory_path* immediately; call export() to write the bundle."""
        self.directory_path = directory_path
        self.file_list: list[File] = []
        self.directory_list: list[Dir] = []
        self._gather()

    def _gather(self):
        """Collect every file and subdirectory under self.directory_path.

        Both lists are sorted by relative path so the bundle bytes (and
        therefore the MD5 signature) are deterministic regardless of
        os.walk() traversal order.
        """
        for root, dirs, files in os.walk(self.directory_path):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                self.file_list.append(
                    {
                        "path": os.path.relpath(file_path, self.directory_path),
                        "size": os.path.getsize(file_path),
                        "content_path": file_path,
                    }
                )

            for dir_name in dirs:
                dir_path = os.path.join(root, dir_name)
                self.directory_list.append(
                    {
                        "path": os.path.relpath(dir_path, self.directory_path),
                    }
                )

        self.file_list.sort(key=lambda f: f["path"])
        self.directory_list.sort(key=lambda d: d["path"])

    def export(self, target_path: str):
        """Write the bundle to *target_path*.

        The signature field is zero-filled first; after all names and
        contents have been written (and hashed along the way), we seek
        back and patch in the final MD5 digest.
        """
        self._md5_hash = hashlib.md5()
        with open(target_path, "wb") as f:
            # Header: magic and format version.
            f.write(struct.pack("<II", 0x4F4C5A44, 0x01))

            # Entry counts.
            f.write(struct.pack("<I", len(self.directory_list)))
            f.write(struct.pack("<I", len(self.file_list)))

            md5_hash_size = len(self._md5_hash.digest())

            # Reserve space for the signature; patched in below.
            f.write(struct.pack("<I", md5_hash_size))
            signature_offset = f.tell()
            f.write(b"\x00" * md5_hash_size)

            self._write_contents(f)

            f.seek(signature_offset)
            f.write(self._md5_hash.digest())

    def _write_contents(self, f):
        """Append directory and file records to *f*, feeding the MD5 as we go."""
        # Directory records: NUL-terminated name, length-prefixed.
        for dir_info in self.directory_list:
            name = dir_info["path"].encode("ascii") + b"\x00"
            f.write(struct.pack("<I", len(name)))
            f.write(name)
            self._md5_hash.update(name)

        # File records: name plus length-prefixed content.
        for file_info in self.file_list:
            name = file_info["path"].encode("ascii") + b"\x00"
            f.write(struct.pack("<I", len(name)))
            f.write(name)
            self._md5_hash.update(name)

            with open(file_info["content_path"], "rb") as content_file:
                content = content_file.read()
            # BUGFIX: write the actual content length, not the size cached
            # at _gather() time (file_info["size"]) — if the file changed
            # on disk between scanning and exporting, the cached size would
            # desynchronize the record framing and corrupt the bundle.
            f.write(struct.pack("<I", len(content)))
            f.write(content)
            self._md5_hash.update(content)
 |