Mirror of https://github.com/Mas0nShi/typoraCracker.git (synced 2023-07-10 13:41:20 +08:00)
Compare commits
3 Commits
auto-analy ... 1.2.3-x64-
| Author | SHA1 | Date |
|---|---|---|
| | 0fc4d945fa | |
| | 08041c56e1 | |
| | b3e809d2af | |
README.md (33 changed lines)
@@ -1,11 +1,7 @@
-**typoraCracker STOPS MAINTENANCE NOW. [why](https://github.com/Mas0nShi/typoraCracker/issues/39#issuecomment-1083117056)?**
-
 # typora Cracker



-[](https://github.com/Mas0nShi/typoraCracker/actions/workflows/manual.yml)
-
 A extract & decryption and pack & encryption tools for typora.
@@ -21,28 +17,17 @@ ANY PROBLEMS ARISING FROM THIS WILL BE BORNE BY THE USER (YOU).
 ```

 ## Features
-- Supports Version 1.0.0 - 1.2.0 [(old archive)](https://github.com/Mas0nShi/typoraCracker/tree/backup-raw)
-- Supports Version 1.2.+ [(Experimental archive)](https://github.com/Mas0nShi/typoraCracker/tree/master)
+- Supports Version 1.0.0+ (At least for now.)
+- tested fine in Windows, Ubuntu

-## Support List
-
-2022.4.4: Experimental support for automatic binary analysis to generate scripts. (v1.2.+)
-
-| OS / ARCH | x86 | x64 | arm64 |
-|:---------:|:---:|:---:|:-----:|
-| win | ✅ | ✅ | ❌ |
-| linux | ❌ | ❌ | ❌ |
-| macOS | ❌ | ❌ | ❌ |
-
 ## Usage
-1. download in [Release Pages](https://github.com/Mas0nShi/typoraCracker/releases)
-2. unzip
-3. `pip install -r requirements.txt`
-4. `python typora.py --help`
-5. read and use.
-6. do something.
-7. pack and replace app.asar.
-8. enjoy it.
+1. `pip install -r requirements.txt`
+2. `python typora.py --help`
+3. read and use.
+4. do something.
+5. pack and replace app.asar.
+6. enjoy it.

 ## Example
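The new usage steps drive typora.py from the command line; the same extract and repack flow can also be scripted against the two functions this compare adds in typora.py, `extractWdec` and `packWenc` (shown further down). A minimal sketch, assuming the repository root is on the Python path and both paths are adjusted to the local Typora install:

```python
# Minimal sketch of the extract -> edit -> repack flow using the helpers
# added in this compare. Both paths below are placeholders.
from typora import extractWdec, packWenc

extractWdec("app.asar", "./work", prettify=True)    # decrypted sources land in ./work/dec_app
# ... edit the decrypted *.js files under ./work/dec_app ...
packWenc("./work/dec_app", "./out", compress=True)  # writes a re-encrypted ./out/app.asar
```

Per the argparse wiring in typora.py, the command-line equivalent would be `python typora.py app.asar ./work` to extract and `python typora.py -u ./work/dec_app ./out` to pack.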
README_CN.md (31 changed lines)
@@ -1,6 +1,3 @@
-**typoraCracker is no longer maintained. [Why](https://github.com/Mas0nShi/typoraCracker/issues/39#issuecomment-1083117056)**
-
-
 # typora Cracker

 An unpack & decrypt and pack & encrypt tool for typora
@@ -15,30 +12,18 @@
 ```

 ## Features
-- Version 1.0.0 - 1.2.0 [(use the old archive)](https://github.com/Mas0nShi/typoraCracker/tree/backup-raw)
-- Version 1.2.+ [(use the experimental archive)](https://github.com/Mas0nShi/typoraCracker/tree/master)
-
-## Support List
-
-2022.4.4: Supports automatic analysis of the binary to generate scripts (experimental, v1.2.+)
-
-| OS / ARCH | x86 | x64 | arm64 |
-|:---------:|:---:|:---:|:-----:|
-| win | ✅ | ✅ | ❌ |
-| linux | ❌ | ❌ | ❌ |
-| macOS | ❌ | ❌ | ❌ |
-
+- Supports version 1.0.0 and above (at least for now)
+- Tested platforms: Win/Ubuntu

 ## Usage
-1. Go to the [Release Pages](https://github.com/Mas0nShi/typoraCracker/releases) and download the matching build
-2. Unzip it
-3. Install dependencies: `pip install -r requirements.txt`
-4. Usage: `python typora.py --help`
-5. Use it.
-6. Do whatever you want.
-7. Pack and replace app.asar in the original directory.
-8. Enjoy the result.
+1. `pip install -r requirements.txt`
+2. `python typora.py --help`
+3. Read the help and use it.
+4. Do whatever you want.
+5. Pack and replace app.asar in the original directory.
+6. Enjoy the result.

 ## Example
@@ -1,23 +0,0 @@
from utils import get_version, download_file, extract_file, log
from config import DOWNLOAD_LINK
import os

BASE_DIR = os.path.dirname(__file__)


def run_version(download_os, download_arch):
    from_url = DOWNLOAD_LINK[download_os][download_arch]
    to_dir = os.path.join(BASE_DIR, f"{download_os}/{download_arch}")

    download_path = os.path.join(to_dir, os.path.basename(from_url))
    download_file(from_url, download_path)
    extract_file(download_path, to_dir)
    version = get_version(to_dir)
    open(os.path.join(to_dir, "LATEST_VERSION"), "w").write(version)
    log.success(f"{download_os}-{download_arch} the latest version is {version}")


if __name__ == '__main__':
    run_version("win", "x64")
    # run_version("win", "x86")
    # run_version("linux", "x64")
@@ -1,20 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: config.py
@Time: 2022/4/4 19:50
@Desc: It's all about getting better.
"""

DOWNLOAD_LINK = {
    "win": {
        "x86": "https://typora.io/windows/typora-setup-ia32.exe",
        "x64": "https://typora.io/windows/typora-setup-x64.exe",
        "arm64": "https://typora.io/windows/typora-setup-arm64.exe",
    },
    "linux": {
        "x64": "https://download.typora.io/linux/Typora-linux-x64.tar.gz",
        "arm64": "https://download.typora.io/linux/Typora-linux-arm64.tar.gz",
    },
}
@@ -1,32 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_linux_x64_analysis
@Time: 2022/4/4 19:48
@Desc: It's all about getting better.
"""
import json
import r2pipe


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)

    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    if 'str.dip3' in json.dumps(asm):
        r.cmd('s str.dip3 - 32')
        data = r.cmdj('xj 48')
        key = bytearray(data[0:32])
        iv = bytearray(data[32:48])
    else:
        raise "need rewrite scripts for linux x64"

    return key, iv
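The `str.dip3` marker this script anchors on is simply the first IV dword that typora.py (added later in this compare) packs as `0x33706964`: decoded little-endian, that value is the ASCII text `dip3`, which is why the key is read from the 32 bytes immediately before it. A quick check of that relationship:

```python
# Quick check: the first IV dword used in typora.py, decoded little-endian,
# is the ASCII marker "dip3" that the radare2 script above seeks back from.
import struct

iv_head = struct.pack("<L", 0x33706964)
print(iv_head)            # b'dip3'
assert iv_head == b"dip3"
```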
@@ -1,14 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: patch.py
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
import utils

if __name__ == '__main__':
    utils.win_x86_run()
    utils.win_x64_run()
    utils.linux_x64_run()
@@ -1,89 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: utils.py
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
from loguru import logger as log
from config import DOWNLOAD_LINK
import subprocess
import json
import os


BASE_DIR = os.path.dirname(__file__)


def get_version(to_path):
    package_file_path = os.path.join(to_path, "app/resources/package.json")
    package_info = open(package_file_path, "r").read()
    package_obj = json.loads(package_info)
    return package_obj["version"]


def download_file(from_link, to_path):
    subprocess.check_call(["wget", from_link, "-O", to_path])


def extract_file(from_path, to_path):
    subprocess.check_call(["innoextract", from_path, "-d", to_path])


def patch_file(_key, _iv, to_dir):
    exports_file_path = os.path.join(BASE_DIR, "../exports.tar.gz")
    save_dir = os.path.join(to_dir, "build")
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    subprocess.check_call(["tar", "-zxvf", exports_file_path, "-C", save_dir])
    patch_file_path = os.path.join(save_dir, "typora.py")
    content = open(patch_file_path, "r").read()
    content = content.replace("{AES_KEY}", f"b''.fromhex('{_key}')")
    content = content.replace("{AES_IV}", f"b''.fromhex('{_iv}')")
    open(patch_file_path, "w").write(content)


def scheduler(func, basedir, link):

    download_path = os.path.join(basedir, os.path.basename(link))
    log.info(f"downloading from {link}")
    download_file(link, download_path)
    log.info("ready extract package")

    extract_file(download_path, basedir)
    log.info("preparation stage completed")
    main_node_path = os.path.join(basedir, "app/resources/app.asar.unpacked/main.node")
    log.info("auto analysis start")
    key, iv = func.get_aes_key_and_iv(main_node_path)
    log.success("analysis done")

    patch_file(key.hex(), iv.hex(), basedir)
    log.success("patch done")


def win_x64_run():
    from win.x64 import analysis
    dirs = os.path.join(BASE_DIR, "win/x64")
    url = DOWNLOAD_LINK["win"]["x64"]
    scheduler(func=analysis, basedir=dirs, link=url)


def win_x86_run():
    from win.x86 import analysis
    dirs = os.path.join(BASE_DIR, "win/x86")
    url = DOWNLOAD_LINK["win"]["x86"]
    scheduler(func=analysis, basedir=dirs, link=url)


def linux_x64_run():
    from linux.x64 import analysis
    dirs = os.path.join(BASE_DIR, "linux/x64")
    url = DOWNLOAD_LINK["linux"]["x64"]
    scheduler(func=analysis, basedir=dirs, link=url)


if __name__ == '__main__':
    win_x86_run()
    win_x64_run()
    linux_x64_run()
@@ -1,38 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_win_x64_analysis
@Time: 2022/4/3 18:26
@Desc: It's all about getting better.
"""
import struct
import r2pipe


def regex_key_iv(asm_obj):
    asm_regex = []
    for body in asm_obj:
        if "=[4]" in body["esil"] and body['type'] == 'mov':
            opcode, value = body["disasm"].split(", ")
            if "0x" in value:
                asm_regex.append({"opcode": opcode, "value": value})
    return asm_regex


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)
    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    asm_regex = regex_key_iv(asm)
    assert len(asm_regex) == 12

    iv = struct.pack("<4L", *[int(asm_regex[i]['value'], 16) for i in range(4)])
    key = struct.pack("<8L", *[int(asm_regex[i]['value'], 16) for i in range(4, 12)])
    return key, iv
@@ -1,45 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_win_x86_analysis
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
import struct
import r2pipe


def regex_key_iv(asm_obj):
    asm_regex = []
    for body in asm_obj:
        if "=[4]" in body["esil"] and body['type'] == 'mov':
            opcode, value = body["disasm"].split(", ")
            if "0x" in value:
                asm_regex.append({"opcode": opcode, "value": value})
    return asm_regex


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)
    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    asm_regex = regex_key_iv(asm)

    iv = struct.pack("<4L", *[int(asm_regex[i]['value'], 16) for i in range(4)])

    # find the set key func
    call_regex = [i for i in asm if i['size'] == 5 and i['type'] == 'call']
    r.cmd(f"s {call_regex[1]['jump']}")
    asm = r.cmdj("pdfj")["ops"]
    asm_regex = regex_key_iv(asm)
    assert len(asm_regex) == 8

    key = struct.pack("<8L", *[int(asm_regex[i]['value'], 16) for i in range(8)])
    return key, iv
exports.tar.gz (BIN) Binary file not shown.
masar.py (176 lines) Normal file
@@ -0,0 +1,176 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: masar.py
@Time: 2021-11-29 22:34
@Desc: It's all about getting better.
"""
import os
import errno
import io
import struct
import shutil
import fileinput
import json


def round_up(i, m):
    return (i + m - 1) & ~(m - 1)


class Asar:
    def __init__(self, path, fp, header, base_offset):
        self.path = path
        self.fp = fp
        self.header = header
        self.base_offset = base_offset

    @classmethod
    def open(cls, path):
        fp = open(path, 'rb')
        data_size, header_size, header_object_size, header_string_size = struct.unpack('<4I', fp.read(16))
        header_json = fp.read(header_string_size).decode('utf-8')
        return cls(
            path=path,
            fp=fp,
            header=json.loads(header_json),
            base_offset=round_up(16 + header_string_size, 4)
        )

    @classmethod
    def compress(cls, path):
        offset = 0
        paths = []

        def _path_to_dict(path):
            nonlocal offset, paths
            result = {'files': {}}
            for f in os.scandir(path):
                if os.path.isdir(f.path):
                    result['files'][f.name] = _path_to_dict(f.path)
                elif f.is_symlink():
                    result['files'][f.name] = {
                        'link': os.path.realpath(f.name)
                    }
                # modify
                elif f.name == "main.node":
                    size = f.stat().st_size
                    result['files'][f.name] = {
                        'size': size,
                        "unpacked": True
                    }
                else:
                    paths.append(f.path)
                    size = f.stat().st_size
                    result['files'][f.name] = {
                        'size': size,
                        'offset': str(offset)
                    }
                    offset += size
            return result

        def _paths_to_bytes(paths):
            _bytes = io.BytesIO()
            with fileinput.FileInput(files=paths, mode="rb") as f:
                for i in f:
                    _bytes.write(i)
            return _bytes.getvalue()

        header = _path_to_dict(path)
        header_json = json.dumps(header, sort_keys=True, separators=(',', ':')).encode('utf-8')
        header_string_size = len(header_json)
        data_size = 4
        aligned_size = round_up(header_string_size, data_size)
        header_size = aligned_size + 8
        header_object_size = aligned_size + data_size
        diff = aligned_size - header_string_size
        header_json = header_json + b'\0' * diff if diff else header_json
        fp = io.BytesIO()
        fp.write(struct.pack('<4I', data_size, header_size, header_object_size, header_string_size))
        fp.write(header_json)
        fp.write(_paths_to_bytes(paths))

        return cls(
            path=path,
            fp=fp,
            header=header,
            base_offset=round_up(16 + header_string_size, 4))

    def _copy_unpacked_file(self, source, destination):
        unpacked_dir = self.path + '.unpacked'
        if not os.path.isdir(unpacked_dir):
            print("Couldn't copy file {}, no extracted directory".format(source))
            return

        src = os.path.join(unpacked_dir, source)
        if not os.path.exists(src):
            print("Couldn't copy file {}, doesn't exist".format(src))
            return

        dest = os.path.join(destination, source)
        shutil.copyfile(src, dest)

    def _extract_file(self, source, info, destination):
        if 'offset' not in info:
            self._copy_unpacked_file(source, destination)
            return

        self.fp.seek(self.base_offset + int(info['offset']))
        r = self.fp.read(int(info['size']))

        dest = os.path.join(destination, source)
        with open(dest, 'wb') as f:
            f.write(r)

    def _extract_link(self, source, link, destination):
        dest_filename = os.path.normpath(os.path.join(destination, source))
        link_src_path = os.path.dirname(os.path.join(destination, link))
        link_to = os.path.join(link_src_path, os.path.basename(link))

        try:
            os.symlink(link_to, dest_filename)
        except OSError as e:
            if e.errno == errno.EEXIST:
                os.unlink(dest_filename)
                os.symlink(link_to, dest_filename)
            else:
                raise e

    def _extract_directory(self, source, files, destination):
        dest = os.path.normpath(os.path.join(destination, source))

        if not os.path.exists(dest):
            os.makedirs(dest)

        for name, info in files.items():
            item_path = os.path.join(source, name)

            if 'files' in info:
                self._extract_directory(item_path, info['files'], destination)
            elif 'link' in info:
                self._extract_link(item_path, info['link'], destination)
            else:
                self._extract_file(item_path, info, destination)

    def extract(self, path):
        if not os.path.isdir(path):
            raise NotADirectoryError()
        self._extract_directory('.', self.header['files'], path)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.fp.close()


def pack_asar(source, dest):
    with Asar.compress(source) as a:
        with open(dest, 'wb') as fp:
            a.fp.seek(0)
            fp.write(a.fp.read())


def extract_asar(source, dest):
    with Asar.open(source) as a:
        a.extract(dest)
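Asar.open above parses the asar container directly: a 16-byte header of four little-endian uint32 values (data size, header size, header object size, header string size) followed by a JSON index, with file payloads appended after the padded index. A minimal sketch of peeking at that header without the class, assuming an app.asar sits in the current directory:

```python
# Minimal sketch: read an asar header the same way Asar.open does.
# Assumes a file named "app.asar" exists in the current directory.
import json
import struct

with open("app.asar", "rb") as fp:
    data_size, header_size, header_object_size, header_string_size = struct.unpack("<4I", fp.read(16))
    index = json.loads(fp.read(header_string_size).decode("utf-8"))

# The JSON index maps every packed entry to its size and byte offset.
print(header_string_size, sorted(index["files"])[:5])
```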
requirements.txt (4 lines) Normal file
@@ -0,0 +1,4 @@
jsbeautifier==1.14.0
jsmin==3.0.0
loguru==0.5.3
pycryptodome==3.11.0
typora.py (183 lines) Normal file
@@ -0,0 +1,183 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: typora.py
@Time: 2021-11-29 21:24
@Desc: It's all about getting better.
"""
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from base64 import b64decode, b64encode
from jsbeautifier import beautify
from jsmin import jsmin
from os import listdir, urandom, makedirs
from os.path import isfile, isdir, join as pjoin, split as psplit, exists, abspath
from loguru import logger as log
from masar import extract_asar, pack_asar
from shutil import rmtree
from argparse import ArgumentParser
import struct
import sys

# DEBUG
DEBUG = False

log.remove()
if DEBUG:
    log.add(sys.stderr, level="DEBUG")
else:
    log.add(sys.stderr, level="INFO")

AES_KEY = struct.pack("<4Q", *[0x5CA1FF4578961A92, 0x73FD782E4D01350F, 0x031E97ED94CF8462, 0x21977F64C78BC7D6])
AES_IV = struct.pack("<4L", *[0x33706964, 0x5387CD15, 0xD05F336D, 0x53F82468])


def _mkDir(_path):
    if not exists(_path):
        makedirs(_path)
    else:
        if _path == psplit(__file__)[0]:
            log.warning("plz try not to use the root dir.")
        else:
            log.warning(f"May FolderExists: {_path}")


def decScript(b64: bytes, prettify: bool):
    lCode = b64decode(b64)
    # iv
    aesIv = AES_IV
    # cipher text
    cipherText = lCode[:]
    # AES 256 CBC
    ins = AES.new(key=AES_KEY, iv=aesIv, mode=AES.MODE_CBC)
    code = unpad(ins.decrypt(cipherText), 16, 'pkcs7')
    if prettify:
        code = beautify(code.decode()).encode()
    return code


def extractWdec(asarPath, path, prettify):
    """
    :param prettify: bool
    :param asarPath: asar out dir
    :param path: out dir
    :return: None
    """
    # try to create empty dir to save extract files
    path = pjoin(path, "typoraCrackerTemp")

    if exists(path):
        rmtree(path)
    _mkDir(path)

    log.info(f"extract asar file: {asarPath}")
    # extract app.asar to {path}/*
    extract_asar(asarPath, path)
    log.success(f"extract ended.")

    log.info(f"read Directory: {path}")
    # construct the save directory {pathRoot}/dec_app
    outPath = pjoin(psplit(path)[0], "dec_app")
    # try to create empty dir to save decryption files
    if exists(outPath):
        rmtree(outPath)
    _mkDir(outPath)

    log.info(f"set Directory: {outPath}")
    # enumerate extract files
    fileArr = listdir(path)
    for name in fileArr:
        # read files content
        fpath = pjoin(path, name)
        scode = open(fpath, "rb").read()
        log.info(f"open file: {name}")
        # if file suffix is *.js then decryption file
        if isfile(fpath) and name.endswith(".js"):
            scode = decScript(scode, prettify)
        else:
            log.debug(f"skip file: {name}")
        # save content {outPath}/{name}
        open(pjoin(outPath, name), "wb").write(scode)
        log.success(f"decrypt and save file: {name}")

    rmtree(path)
    log.debug("remove temp dir")


def encScript(_code: bytes, compress):
    if compress:
        _code = jsmin(_code.decode(), quote_chars="'\"`").encode()
    aesIv = AES_IV
    cipherText = _code
    ins = AES.new(key=AES_KEY, iv=aesIv, mode=AES.MODE_CBC)
    enc = ins.encrypt(pad(cipherText, 16, 'pkcs7'))
    lCode = b64encode(enc)
    return lCode


def packWenc(path, outPath, compress):
    """
    :param path: out dir
    :param outPath: pack path app.asar
    :param compress: Bool
    :return: None
    """
    # check out path
    if isfile(outPath):
        log.error("plz input Directory for app.asar")
        raise NotADirectoryError

    _mkDir(outPath)

    encFilePath = pjoin(psplit(outPath)[0], "typoraCrackerTemp")
    if exists(encFilePath):
        rmtree(encFilePath)
    _mkDir(encFilePath)

    outFilePath = pjoin(outPath, "app.asar")
    log.info(f"set outFilePath: {outFilePath}")
    fileArr = listdir(path)

    for name in fileArr:
        fpath = pjoin(path, name)
        if isdir(fpath):
            log.error("TODO: found folder")
            raise IsADirectoryError

        scode = open(fpath, "rb").read()
        log.info(f"open file: {name}")
        if isfile(fpath) and name.endswith(".js"):
            scode = encScript(scode, compress)

        open(pjoin(encFilePath, name), "wb").write(scode)
        log.success(f"encrypt and save file: {name}")

    log.info("ready to pack")
    pack_asar(encFilePath, outFilePath)
    log.success("pack done")

    rmtree(encFilePath)
    log.debug("remove temp dir")


def main():
    argParser = ArgumentParser(
        description="[extract and decryption / pack and encryption] app.asar file from [Typora].",
        epilog="If you have any questions, please contact [ MasonShi@88.com ]")
    argParser.add_argument("asarPath", type=str, help="app.asar file path/dir [input/output]")
    argParser.add_argument("dirPath", type=str, help="as tmp and out directory.")

    argParser.add_argument('-u', dest='mode', action='store_const',
                           const=packWenc, default=extractWdec,
                           help='pack & encryption (default: extract & decryption)')
    argParser.add_argument('-f', dest='format', action='store_const',
                           const=True, default=False,
                           help='enabled prettify/compress (default: disabled)')
    args = argParser.parse_args()

    args.mode(args.asarPath, args.dirPath, args.format)
    log.success("Done!")


if __name__ == '__main__':
    main()
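decScript and encScript above are a plain base64 plus AES-256-CBC (PKCS#7) round trip using the fixed AES_KEY/AES_IV constants. A minimal self-check, assuming the packages from requirements.txt are installed and typora.py is importable from the repository root:

```python
# Minimal round-trip self-check for the helpers above.
from typora import decScript, encScript

src = b"console.log('hello');"
enc = encScript(src, compress=False)           # base64(AES-256-CBC(pad(src, 16)))
assert decScript(enc, prettify=False) == src   # decrypts back to the original bytes
print(enc[:32])
```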