hebing.py

import json
import hashlib
import re
import sys
import os


# Compute the MD5 of the local file fan.txt
def get_md5(filepath):
    md5 = hashlib.md5()
    with open(filepath, "rb") as f:
        # Read in 8 KB chunks so large files stay memory-friendly
        # (the walrus operator requires Python 3.8+)
        while chunk := f.read(8192):
            md5.update(chunk)
    return md5.hexdigest()


# Load a JSON file
def load_json(path):
    with open(path, "r", encoding="utf-8") as f:
        return json.load(f)


# Save JSON with a compact layout: fold each dict inside an array onto a
# single line; keep empty arrays and arrays of scalars on one line.
class CompactJSONEncoder(json.JSONEncoder):
    def iterencode(self, o, _one_shot=False):
        def _compact_list(lst, indent_level):
            pad = '  ' * indent_level
            # Empty lists and lists of scalars render inline
            if not lst or all(isinstance(i, (str, int, float, bool, type(None))) for i in lst):
                return json.dumps(lst, ensure_ascii=False)
            # Lists of dicts: one dict per line
            if all(isinstance(i, dict) for i in lst):
                return '[\n' + ',\n'.join(
                    pad + '  ' + json.dumps(i, ensure_ascii=False, separators=(',', ': '))
                    for i in lst
                ) + '\n' + pad + ']'
            # Mixed lists fall back to the default pretty-printer
            return json.dumps(lst, ensure_ascii=False, indent=2)

        def _encode(obj, indent_level=0):
            pad = '  ' * indent_level
            if isinstance(obj, dict):
                # json.dumps on the key (rather than a bare f'"{k}"') escapes
                # quotes and backslashes in key names correctly
                lines = [
                    f'{json.dumps(k, ensure_ascii=False)}: {_encode(v, indent_level + 1)}'
                    for k, v in obj.items()
                ]
                return '{\n' + pad + '  ' + (',\n' + pad + '  ').join(lines) + '\n' + pad + '}'
            elif isinstance(obj, list):
                return _compact_list(obj, indent_level)
            return json.dumps(obj, ensure_ascii=False)

        return iter([_encode(o)])
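
# Illustrative only: with the encoder above, a config shaped like the TVBox
# files this script merges, e.g.
#   {"spider": "jar;md5;abc", "sites": [{"key": "a", "name": "A"}, {"key": "b", "name": "B"}]}
# is written roughly as:
#   {
#     "spider": "jar;md5;abc",
#     "sites": [
#       {"key": "a","name": "A"},
#       {"key": "b","name": "B"}
#     ]
#   }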


def save_json(data, path):
    with open(path, "w", encoding="utf-8") as f:
        # indent=2 is effectively ignored here: CompactJSONEncoder overrides
        # iterencode and does all of its own formatting
        json.dump(data, f, indent=2, ensure_ascii=False, cls=CompactJSONEncoder)
    print(f"✅ Saved: {path}")


# Insert cleaned_sites just above the marker entry in the target sites list
def insert_sites(base_sites, cleaned_sites, key_marker="奇优"):
    for i, item in enumerate(base_sites):
        if item.get("key") == key_marker:
            return base_sites[:i] + cleaned_sites + base_sites[i:]
    print(f"⚠️ No insertion point with key {key_marker} found; appending to the end")
    return base_sites + cleaned_sites
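
# Illustrative only (the keys below are made up): given
#   base    = [{"key": "douban"}, {"key": "奇优"}]
#   cleaned = [{"key": "new1"}, {"key": "new2"}]
# insert_sites(base, cleaned) returns
#   [{"key": "douban"}, {"key": "new1"}, {"key": "new2"}, {"key": "奇优"}]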


if __name__ == "__main__":
    # Default paths
    dianshi_path = "dianshi.json"
    cleaned_path = "tvbox_cleaned.json"
    # Override the defaults with command-line arguments, if given
    if len(sys.argv) > 1:
        dianshi_path = sys.argv[1]
    if len(sys.argv) > 2:
        cleaned_path = sys.argv[2]
    try:
        # Compute the MD5 of fan.txt
        md5_value = get_md5("fan.txt")
        print(f"🔐 MD5 of fan.txt: {md5_value}")
        # Load both JSON files
        dianshi = load_json(dianshi_path)
        cleaned = load_json(cleaned_path)
        # Refresh the md5 embedded in the spider field
        if "spider" in dianshi:
            old_spider = dianshi["spider"]
            new_spider = re.sub(r'md5;[a-f0-9]+', f'md5;{md5_value}', old_spider)
            dianshi["spider"] = new_spider
            print(f"🔄 Replaced spider field with: {new_spider}")
        else:
            print("⚠️ No spider field found in dianshi.json")
        # Insert the cleaned sites
        cleaned_sites = cleaned.get("sites", [])
        dianshi["sites"] = insert_sites(dianshi.get("sites", []), cleaned_sites)
        # Save the final merged file as <name>_merged<ext>
        name, ext = os.path.splitext(dianshi_path)
        output_path = f"{name}_merged{ext}"
        save_json(dianshi, output_path)
        # save_json(dianshi, "dianshi_merged.json")
    except Exception as e:
        print(f"❌ Error: {e}")
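
# Example invocation (fan.txt must sit next to the script; the file names are
# the script's own defaults):
#   python hebing.py dianshi.json tvbox_cleaned.json
# writes the merged result to dianshi_merged.json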