Mirror of https://github.com/Mas0nShi/typoraCracker.git (synced 2023-07-10 13:41:20 +08:00)
Compare commits
87 Commits
| SHA1 |
|---|
| 3ac472a198 |
| c3ed1499cc |
| 637bc99b44 |
| d294746a36 |
| db41e7942c |
| d8f253e0c8 |
| 19e73b6c79 |
| e6b6d7a9e1 |
| ea2e2b479c |
| e562f37282 |
| b8726b62ae |
| e6d3d5aaff |
| c1beef1a98 |
| 2e5bfcfb50 |
| bab9df1f5f |
| 1c701eb7ae |
| df4320c481 |
| d16428a8b2 |
| 37cb084073 |
| ce93881acd |
| d13b7df457 |
| 8659ad77d8 |
| f09cf7a8b5 |
| 591853fd67 |
| 8df8b7f18e |
| 6968976033 |
| b582671923 |
| ff414f6d79 |
| 3643ec2de3 |
| a312537aa2 |
| 97a4f578b6 |
| 43b858c3ce |
| df5bb4473f |
| facaed285a |
| 937e3aa178 |
| c984fd06c0 |
| f340eef89c |
| b8f870e55f |
| 4d635ec11f |
| 2d106c7e48 |
| 8f070902e0 |
| f9bb673a51 |
| c3fa3cb361 |
| 8ee3a91b18 |
| c262491d67 |
| 2ff0a25c1f |
| 7176bb2f82 |
| 785b3bcd49 |
| 40a95424b0 |
| cd28ff03d9 |
| 7aaba1739a |
| 7d73c08257 |
| d2fd9f12bf |
| 142367e963 |
| 7e1916e8e9 |
| fe77785833 |
| 53a9057a2c |
| 1df0199b91 |
| 9b298bc024 |
| 029a4a344f |
| 3e690f077d |
| 5c518c7ab8 |
| fc578afd40 |
| 4b3cfee43f |
| 9369288989 |
| 8f467159c7 |
| 5489c3b664 |
| 1710c83f99 |
| 219e0316f5 |
| 60a7f39b16 |
| 3c9d47b625 |
| 16c225bb1f |
| 643c0f3e3c |
| d84a6b9041 |
| e21922b1f1 |
| 18e2562146 |
| d870e234c3 |
| 76c9a5b398 |
| 25cb04e00f |
| 1c879400fd |
| d5f9fa4026 |
| 11589aff49 |
| 2a7fd5622a |
| df83d9cae4 |
| d90f36ba80 |
| 6c5039c632 |
| f44e9355fb |
22 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,22 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
- OS: [e.g. Ubuntu]
- Version [e.g. 1.1.2]
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
166 .github/workflows/manual.yml vendored Normal file
@@ -0,0 +1,166 @@
# This is a basic workflow that is manually triggered

name: Automatic analysis

# Controls when the action will run. Workflow runs when manually triggered using the UI
# or API.
on:
  workflow_dispatch:
  schedule:
    - cron: '23 21 * * 1'

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "greet"
  check_version:
    name: check the latest version
    # The type of runner that the job will run on
    runs-on: ubuntu-20.04
    outputs:
      RELEASE_VERSION: ${{ steps.getLatestRelease.outputs.RELEASE_VERSION }}
      LATEST_VERSION: ${{ steps.checkVersion.outputs.LATEST_VERSION }}
    steps:
      - name: Get Latest Release
        id: getLatestRelease
        uses: actions/github-script@v3.1.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const releaseResponse = await github.repos.getLatestRelease({
              owner: 'Mas0nShi',
              repo: 'typoraCracker',
            })
            const {
              data: { tag_name: ver }
            } = releaseResponse;
            core.setOutput('RELEASE_VERSION', ver);

      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: '3.8' # Version range or exact version of a Python version to use, using SemVer's version range syntax
          architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
      - name: install dependencies
        run: |
          sudo apt-get update && DEBIAN_FRONTEND=noninteractive sudo apt-get install innoextract -y
          python3 -m pip install loguru

      - name: Check Latest Version (use win-x64)
        id: checkVersion
        run: |
          python3 auto-analysis/check_version.py
          output="$(cat auto-analysis/win/x64/LATEST_VERSION)"
          echo "$output"
          echo "::set-output name=LATEST_VERSION::$output"

  create_release:
    needs: check_version
    runs-on: ubuntu-20.04
    if: needs.check_version.outputs.RELEASE_VERSION != needs.check_version.outputs.LATEST_VERSION

    steps:
      - run: echo '${{ needs.check_version.outputs.LATEST_VERSION }}'

      - name: Create Runner Release
        uses: actions/create-release@v1
        id: createRelease
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: '${{ needs.check_version.outputs.LATEST_VERSION }}'
          release_name: '${{ needs.check_version.outputs.LATEST_VERSION }}'
          prerelease: false

  patch_file:
    needs: [check_version, create_release]
    runs-on: ubuntu-20.04
    if: needs.check_version.outputs.RELEASE_VERSION != needs.check_version.outputs.LATEST_VERSION

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: '3.8'
          architecture: 'x64'
      - name: install dependencies
        run: |
          sudo apt-get update && DEBIAN_FRONTEND=noninteractive sudo apt-get install innoextract zip -y
          python3 -m pip install r2pipe loguru
      - name: build radare2
        shell: bash
        run: |
          git clone https://github.com/radareorg/radare2
          radare2/sys/install.sh
      - name: patch version
        run: |
          python3 auto-analysis/patch.py

          zip -rjq auto-analysis/win/x64/build/typoraCracker.zip auto-analysis/win/x64/build/*
          zip -rjq auto-analysis/win/x86/build/typoraCracker.zip auto-analysis/win/x86/build/*
          zip -rjq auto-analysis/linux/x64/build/typoraCracker.zip auto-analysis/linux/x64/build/*

      - name: Check release version
        id: checkReleaseVersion
        uses: actions/github-script@v3.1.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            try {
              const releaseVersion = '${{ needs.check_version.outputs.LATEST_VERSION }}'
              const releaseResponse = await github.repos.getReleaseByTag({
                owner: 'Mas0nShi',
                repo: 'typoraCracker',
                tag: releaseVersion
              })
              const {
                data: { id: releaseId, html_url: htmlUrl, upload_url: uploadUrl }
              } = releaseResponse;
              core.setOutput('id', releaseId);
              core.setOutput('html_url', htmlUrl);
              core.setOutput('upload_url', uploadUrl);
              core.setOutput('version', releaseVersion);
            } catch (e) {
              core.setFailed(e.message);
            }

      - name: Upload win-x64
        uses: actions/upload-release-asset@v1.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: '${{ steps.checkReleaseVersion.outputs.upload_url }}'
          asset_path: '${{ github.workspace }}/auto-analysis/win/x64/build/typoraCracker.zip'
          asset_name: 'typoraCracker-${{ needs.check_version.outputs.LATEST_VERSION }}-win-x64.zip'
          asset_content_type: application/zip

      - name: Upload win-x86
        uses: actions/upload-release-asset@v1.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: '${{ steps.checkReleaseVersion.outputs.upload_url }}'
          asset_path: '${{ github.workspace }}/auto-analysis/win/x86/build/typoraCracker.zip'
          asset_name: 'typoraCracker-${{ needs.check_version.outputs.LATEST_VERSION }}-win-x86.zip'
          asset_content_type: application/zip

      - name: Upload linux-x64
        uses: actions/upload-release-asset@v1.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: '${{ steps.checkReleaseVersion.outputs.upload_url }}'
          asset_path: '${{ github.workspace }}/auto-analysis/linux/x64/build/typoraCracker.zip'
          asset_name: 'typoraCracker-${{ needs.check_version.outputs.LATEST_VERSION }}-linux-x64.zip'
          asset_content_type: application/zip

      - name: verify scripts
        run: |
          python3 -m pip install -r auto-analysis/win/x64/build/requirements.txt

          python3 auto-analysis/win/x64/build/typora.py auto-analysis/win/x64/app/resources/app.asar auto-analysis/win/x64
          echo "win x64 test completed"
          python3 auto-analysis/win/x86/build/typora.py auto-analysis/win/x86/app/resources/app.asar auto-analysis/win/x86
          echo "win x86 test completed"
          python3 auto-analysis/linux/x64/build/typora.py auto-analysis/linux/x64/bin/Typora-linux-x64/resources/app.asar auto-analysis/linux/x64
          echo "linux x64 test completed"
42 README.md
@@ -1,7 +1,20 @@
+**typoraCracker STOPS MAINTENANCE NOW. [why](https://github.com/Mas0nShi/typoraCracker/issues/39#issuecomment-1083117056)?**
+
# typora Cracker

-
-
+
+



+[](https://github.com/Mas0nShi/typoraCracker/actions/workflows/manual.yml)

A extract & decryption and pack & encryption tools for typora.
@@ -17,17 +30,28 @@ ANY PROBLEMS ARISING FROM THIS WILL BE BORNE BY THE USER (YOU).
```

## Features
-- Supports Version 1.0.0+ (At least for now.)
-- tested fine in Windows, Ubuntu
+- Supports Version 1.0.0 - 1.2.0 [(old archive)](https://github.com/Mas0nShi/typoraCracker/tree/backup-raw)
+- Supports Version 1.2.+ [(Experimental archive)](https://github.com/Mas0nShi/typoraCracker/tree/master)
+
+## Support List
+2022.4.4: Experimental support for automatic binary analysis to generate scripts. (v1.2.+)
+
+| OS / ARCH | x86 | x64 | arm64 |
+|:---------:|:---:|:---:|:-----:|
+| win | ✅ | ✅ | ❌ |
+| linux | ⛔ | ✅ | ❌ |
+| macOS | ❌ | ❌ | ❌ |

## Usage
-1. `pip install -r requirements.txt`
-2. `python typora.py --help`
-3. read and use.
-4. do something.
-5. pack and replace app.asar.
-6. enjoy it.
+1. download in [Release Pages](https://github.com/Mas0nShi/typoraCracker/releases)
+2. unzip
+3. `pip install -r requirements.txt`
+4. `python typora.py --help`
+5. read and use.
+6. do something.
+7. pack and replace app.asar.
+8. enjoy it.
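
In practice the usage steps above boil down to a short command-line session. A minimal sketch, based on the flags defined by the argument parser in typora.py further down in this diff; the Typora install path and the work/out directories are placeholders, not paths from the project:

```
pip install -r requirements.txt
# extract & decrypt (default mode): writes decrypted sources to ./work/dec_app
python typora.py "/path/to/Typora/resources/app.asar" ./work
# ... edit the decrypted scripts in ./work/dec_app ...
# pack & encrypt back into ./out/app.asar (-u = pack mode, -f = compress), then replace the original app.asar
python typora.py ./work/dec_app ./out -u -f
```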

## Example
31 README_CN.md
@@ -1,3 +1,6 @@
+**typoraCracker is no longer maintained. [Why](https://github.com/Mas0nShi/typoraCracker/issues/39#issuecomment-1083117056)**
+
# typora Cracker

An unpack & decrypt and pack & encrypt tool for typora.
@@ -12,18 +15,30 @@
```

## Features
-- Supports version 1.0.0 and above (at least for now)
-- Tested platforms: Windows/Ubuntu
+- Versions 1.0.0 - 1.2.0 [(use the old archive)](https://github.com/Mas0nShi/typoraCracker/tree/backup-raw)
+- Versions 1.2.+ [(use the experimental archive)](https://github.com/Mas0nShi/typoraCracker/tree/master)
+
+## Support List
+2022.4.4: supports automatic analysis of the binaries and script generation (experimental, v1.2.+)
+
+| OS / ARCH | x86 | x64 | arm64 |
+|:---------:|:---:|:---:|:-----:|
+| win | ✅ | ✅ | ❌ |
+| linux | ⛔ | ✅ | ❌ |
+| macOS | ❌ | ❌ | ❌ |

## Usage
-1. `pip install -r requirements.txt`
-2. `python typora.py --help`
-3. Read the help text and use it.
-4. Do whatever you want.
-5. Pack and replace app.asar in the original directory.
-6. Enjoy.
+1. Go to the [Release Pages](https://github.com/Mas0nShi/typoraCracker/releases) and download the matching build
+2. Unzip it
+3. Install dependencies: `pip install -r requirements.txt`
+4. Usage help: `python typora.py --help`
+5. Use it.
+6. Do whatever you want.
+7. Pack and replace app.asar in the original directory.
+8. Enjoy.

## Example
23 auto-analysis/check_version.py Normal file
@@ -0,0 +1,23 @@
from utils import get_version, download_file, extract_file, log
from config import DOWNLOAD_LINK
import os

BASE_DIR = os.path.dirname(__file__)


def run_version(download_os, download_arch):
    from_url = DOWNLOAD_LINK[download_os][download_arch]
    to_dir = os.path.join(BASE_DIR, f"{download_os}/{download_arch}")

    download_path = os.path.join(to_dir, os.path.basename(from_url))
    download_file(from_url, download_path)
    extract_file(download_path, to_dir)
    version = get_version(to_dir)
    open(os.path.join(to_dir, "LATEST_VERSION"), "w").write(version)
    log.success(f"{download_os}-{download_arch} the latest version is {version}")


if __name__ == '__main__':
    run_version("win", "x64")
    # run_version("win", "x86")
    # run_version("linux", "x64")
25 auto-analysis/config.py Normal file
@@ -0,0 +1,25 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: config.py
@Time: 2022/4/4 19:50
@Desc: It's all about getting better.
"""


DOWNLOAD_LINK = {
    "win": {
        "x86": "https://typora.io/windows/typora-setup-ia32.exe",
        "x64": "https://typora.io/windows/typora-setup-x64.exe",
        "arm64": "https://typora.io/windows/typora-setup-arm64.exe",
    },
    "linux": {
        "x64": "https://download.typora.io/linux/Typora-linux-x64.tar.gz",
        "arm64": "https://download.typora.io/linux/Typora-linux-arm64.tar.gz",
    },
}

EXTRACT_ROOT_PATH = {
    "win": "app",
    "linux": "bin/Typora-linux-x64"
}
0 auto-analysis/linux/arm64/LATEST_VERSION Normal file
0 auto-analysis/linux/x64/LATEST_VERSION Normal file
32 auto-analysis/linux/x64/analysis.py Normal file
@@ -0,0 +1,32 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_linux_x64_analysis
@Time: 2022/4/4 19:48
@Desc: It's all about getting better.
"""
import json
import r2pipe


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)

    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    if 'str.dip3' in json.dumps(asm):
        r.cmd('s str.dip3 - 32')
        data = r.cmdj('xj 48')
        key = bytearray(data[0:32])
        iv = bytearray(data[32:48])
    else:
        raise RuntimeError("need rewrite scripts for linux x64")

    return key, iv
14 auto-analysis/patch.py Normal file
@@ -0,0 +1,14 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: patch.py
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
import utils

if __name__ == '__main__':
    utils.win_x86_run()
    utils.win_x64_run()
    utils.linux_x64_run()
91 auto-analysis/utils.py Normal file
@@ -0,0 +1,91 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: utils.py
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
from loguru import logger as log
from config import DOWNLOAD_LINK, EXTRACT_ROOT_PATH
import subprocess
import json
import os

BASE_DIR = os.path.dirname(__file__)


def get_version(to_path):
    package_file_path = os.path.join(to_path, "app/resources/package.json")
    package_info = open(package_file_path, "r").read()
    package_obj = json.loads(package_info)
    return package_obj["version"]


def download_file(from_link, to_path):
    subprocess.check_call(["wget", "-q", from_link, "-O", to_path])


def extract_file(from_path, to_path):
    if from_path.endswith(".exe"):
        subprocess.check_call(["innoextract", "-q", from_path, "-d", to_path])
    elif from_path.endswith(".tar.gz"):
        subprocess.check_call(["tar", "-zxvf", from_path, "-C", to_path])


def patch_file(_key, _iv, to_dir):
    exports_file_path = os.path.join(BASE_DIR, "../exports.tar.gz")
    save_dir = os.path.join(to_dir, "build")
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    subprocess.check_call(["tar", "-zxvf", exports_file_path, "-C", save_dir])
    patch_file_path = os.path.join(save_dir, "typora.py")
    content = open(patch_file_path, "r").read()
    content = content.replace("{AES_KEY}", f"b''.fromhex('{_key}')")
    content = content.replace("{AES_IV}", f"b''.fromhex('{_iv}')")
    open(patch_file_path, "w").write(content)


def scheduler(func, basedir, link, root_path):
    download_path = os.path.join(basedir, os.path.basename(link))
    log.info(f"downloading from {link}")
    download_file(link, download_path)
    log.info("ready extract package")

    extract_file(download_path, basedir)
    log.info("preparation stage completed")

    main_node_path = os.path.join(basedir, os.path.join(root_path, "resources/app.asar.unpacked/main.node"))
    log.info("auto analysis start")
    key, iv = func.get_aes_key_and_iv(main_node_path)
    log.success("analysis done")

    patch_file(key.hex(), iv.hex(), basedir)
    log.success("patch done")


def win_x64_run():
    from win.x64 import analysis
    dirs = os.path.join(BASE_DIR, "win/x64")
    url = DOWNLOAD_LINK["win"]["x64"]
    scheduler(func=analysis, basedir=dirs, link=url, root_path=EXTRACT_ROOT_PATH["win"])


def win_x86_run():
    from win.x86 import analysis
    dirs = os.path.join(BASE_DIR, "win/x86")
    url = DOWNLOAD_LINK["win"]["x86"]
    scheduler(func=analysis, basedir=dirs, link=url, root_path=EXTRACT_ROOT_PATH["win"])


def linux_x64_run():
    from linux.x64 import analysis
    dirs = os.path.join(BASE_DIR, "linux/x64")
    url = DOWNLOAD_LINK["linux"]["x64"]
    scheduler(func=analysis, basedir=dirs, link=url, root_path=EXTRACT_ROOT_PATH["linux"])


if __name__ == '__main__':
    win_x86_run()
    win_x64_run()
    linux_x64_run()
0 auto-analysis/win/arm64/LATEST_VERSION Normal file
0 auto-analysis/win/x64/LATEST_VERSION Normal file
38 auto-analysis/win/x64/analysis.py Normal file
@@ -0,0 +1,38 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_win_x64_analysis
@Time: 2022/4/3 18:26
@Desc: It's all about getting better.
"""
import struct
import r2pipe


def regex_key_iv(asm_obj):
    asm_regex = []
    for body in asm_obj:
        if "=[4]" in body["esil"] and body['type'] == 'mov':
            opcode, value = body["disasm"].split(", ")
            if "0x" in value:
                asm_regex.append({"opcode": opcode, "value": value})
    return asm_regex


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)
    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    asm_regex = regex_key_iv(asm)
    assert len(asm_regex) == 12

    iv = struct.pack("<4L", *[int(asm_regex[i]['value'], 16) for i in range(4)])
    key = struct.pack("<8L", *[int(asm_regex[i]['value'], 16) for i in range(4, 12)])
    return key, iv
0 auto-analysis/win/x86/LATEST_VERSION Normal file
45 auto-analysis/win/x86/analysis.py Normal file
@@ -0,0 +1,45 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@Name: typora_win_x86_analysis
@Time: 2022/4/3 18:36
@Desc: It's all about getting better.
"""
import struct
import r2pipe


def regex_key_iv(asm_obj):
    asm_regex = []
    for body in asm_obj:
        if "=[4]" in body["esil"] and body['type'] == 'mov':
            opcode, value = body["disasm"].split(", ")
            if "0x" in value:
                asm_regex.append({"opcode": opcode, "value": value})
    return asm_regex


def get_aes_key_and_iv(file_path):
    r = r2pipe.open(file_path)
    r.cmd("aaa")
    regex = r.cmdj("axtj @@ str.base64")
    assert len(regex) == 1

    func = regex[0]["fcn_name"]
    r.cmd(f"s {func}")
    asm = r.cmdj("pdfj")['ops']
    assert len(asm) != 0

    asm_regex = regex_key_iv(asm)

    iv = struct.pack("<4L", *[int(asm_regex[i]['value'], 16) for i in range(4)])

    # find the set key func
    call_regex = [i for i in asm if i['size'] == 5 and i['type'] == 'call']
    r.cmd(f"s {call_regex[1]['jump']}")
    asm = r.cmdj("pdfj")["ops"]
    asm_regex = regex_key_iv(asm)
    assert len(asm_regex) == 8

    key = struct.pack("<8L", *[int(asm_regex[i]['value'], 16) for i in range(8)])
    return key, iv
BIN exports.tar.gz Normal file
Binary file not shown.
176 masar.py
@@ -1,176 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: masar.py
@Time: 2021-11-29 22:34
@Desc: It's all about getting better.
"""
import os
import errno
import io
import struct
import shutil
import fileinput
import json


def round_up(i, m):
    return (i + m - 1) & ~(m - 1)


class Asar:
    def __init__(self, path, fp, header, base_offset):
        self.path = path
        self.fp = fp
        self.header = header
        self.base_offset = base_offset

    @classmethod
    def open(cls, path):
        fp = open(path, 'rb')
        data_size, header_size, header_object_size, header_string_size = struct.unpack('<4I', fp.read(16))
        header_json = fp.read(header_string_size).decode('utf-8')
        return cls(
            path=path,
            fp=fp,
            header=json.loads(header_json),
            base_offset=round_up(16 + header_string_size, 4)
        )

    @classmethod
    def compress(cls, path):
        offset = 0
        paths = []

        def _path_to_dict(path):
            nonlocal offset, paths
            result = {'files': {}}
            for f in os.scandir(path):
                if os.path.isdir(f.path):
                    result['files'][f.name] = _path_to_dict(f.path)
                elif f.is_symlink():
                    result['files'][f.name] = {
                        'link': os.path.realpath(f.name)
                    }
                # modify
                elif f.name == "main.node":
                    size = f.stat().st_size
                    result['files'][f.name] = {
                        'size': size,
                        "unpacked": True
                    }
                else:
                    paths.append(f.path)
                    size = f.stat().st_size
                    result['files'][f.name] = {
                        'size': size,
                        'offset': str(offset)
                    }
                    offset += size
            return result

        def _paths_to_bytes(paths):
            _bytes = io.BytesIO()
            with fileinput.FileInput(files=paths, mode="rb") as f:
                for i in f:
                    _bytes.write(i)
            return _bytes.getvalue()

        header = _path_to_dict(path)
        header_json = json.dumps(header, sort_keys=True, separators=(',', ':')).encode('utf-8')
        header_string_size = len(header_json)
        data_size = 4
        aligned_size = round_up(header_string_size, data_size)
        header_size = aligned_size + 8
        header_object_size = aligned_size + data_size
        diff = aligned_size - header_string_size
        header_json = header_json + b'\0' * diff if diff else header_json
        fp = io.BytesIO()
        fp.write(struct.pack('<4I', data_size, header_size, header_object_size, header_string_size))
        fp.write(header_json)
        fp.write(_paths_to_bytes(paths))

        return cls(
            path=path,
            fp=fp,
            header=header,
            base_offset=round_up(16 + header_string_size, 4))

    def _copy_unpacked_file(self, source, destination):
        unpacked_dir = self.path + '.unpacked'
        if not os.path.isdir(unpacked_dir):
            print("Couldn't copy file {}, no extracted directory".format(source))
            return

        src = os.path.join(unpacked_dir, source)
        if not os.path.exists(src):
            print("Couldn't copy file {}, doesn't exist".format(src))
            return

        dest = os.path.join(destination, source)
        shutil.copyfile(src, dest)

    def _extract_file(self, source, info, destination):
        if 'offset' not in info:
            self._copy_unpacked_file(source, destination)
            return

        self.fp.seek(self.base_offset + int(info['offset']))
        r = self.fp.read(int(info['size']))

        dest = os.path.join(destination, source)
        with open(dest, 'wb') as f:
            f.write(r)

    def _extract_link(self, source, link, destination):
        dest_filename = os.path.normpath(os.path.join(destination, source))
        link_src_path = os.path.dirname(os.path.join(destination, link))
        link_to = os.path.join(link_src_path, os.path.basename(link))

        try:
            os.symlink(link_to, dest_filename)
        except OSError as e:
            if e.errno == errno.EEXIST:
                os.unlink(dest_filename)
                os.symlink(link_to, dest_filename)
            else:
                raise e

    def _extract_directory(self, source, files, destination):
        dest = os.path.normpath(os.path.join(destination, source))

        if not os.path.exists(dest):
            os.makedirs(dest)

        for name, info in files.items():
            item_path = os.path.join(source, name)

            if 'files' in info:
                self._extract_directory(item_path, info['files'], destination)
            elif 'link' in info:
                self._extract_link(item_path, info['link'], destination)
            else:
                self._extract_file(item_path, info, destination)

    def extract(self, path):
        if not os.path.isdir(path):
            raise NotADirectoryError()
        self._extract_directory('.', self.header['files'], path)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.fp.close()


def pack_asar(source, dest):
    with Asar.compress(source) as a:
        with open(dest, 'wb') as fp:
            a.fp.seek(0)
            fp.write(a.fp.read())


def extract_asar(source, dest):
    with Asar.open(source) as a:
        a.extract(dest)
4 requirements.txt
@@ -1,4 +0,0 @@
jsbeautifier==1.14.0
jsmin==3.0.0
loguru==0.5.3
pycryptodome==3.11.0
183 typora.py
@@ -1,183 +0,0 @@
# -*- coding:utf-8 -*-
"""
@Author: Mas0n
@File: typora.py
@Time: 2021-11-29 21:24
@Desc: It's all about getting better.
"""
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from base64 import b64decode, b64encode
from jsbeautifier import beautify
from jsmin import jsmin
from os import listdir, urandom, makedirs
from os.path import isfile, isdir, join as pjoin, split as psplit, exists, abspath
from loguru import logger as log
from masar import extract_asar, pack_asar
from shutil import rmtree
from argparse import ArgumentParser
import struct
import sys

# DEBUG
DEBUG = False

log.remove()
if DEBUG:
    log.add(sys.stderr, level="DEBUG")
else:
    log.add(sys.stderr, level="INFO")

AES_KEY = struct.pack("<4Q", *[0x252A4C7BD0B85281, 0xA31BD92CE099F719, 0x13E283392646D82D, 0x118BDE501CF74120])
AES_IV = struct.pack("<4L", *[0x33706964, 0x5387CDD2, 0xD05F336D, 0x53F82468])


def _mkDir(_path):
    if not exists(_path):
        makedirs(_path)
    else:
        if _path == psplit(__file__)[0]:
            log.warning("plz try not to use the root dir.")
        else:
            log.warning(f"May FolderExists: {_path}")


def decScript(b64: bytes, prettify: bool):
    lCode = b64decode(b64)
    # iv
    aesIv = AES_IV
    # cipher text
    cipherText = lCode[:]
    # AES 256 CBC
    ins = AES.new(key=AES_KEY, iv=aesIv, mode=AES.MODE_CBC)
    code = unpad(ins.decrypt(cipherText), 16, 'pkcs7')
    if prettify:
        code = beautify(code.decode()).encode()
    return code


def extractWdec(asarPath, path, prettify):
    """
    :param prettify: bool
    :param asarPath: asar out dir
    :param path: out dir
    :return: None
    """
    # try to create empty dir to save extract files
    path = pjoin(path, "typoraCrackerTemp")

    if exists(path):
        rmtree(path)
    _mkDir(path)

    log.info(f"extract asar file: {asarPath}")
    # extract app.asar to {path}/*
    extract_asar(asarPath, path)
    log.success(f"extract ended.")

    log.info(f"read Directory: {path}")
    # construct the save directory {pathRoot}/dec_app
    outPath = pjoin(psplit(path)[0], "dec_app")
    # try to create empty dir to save decryption files
    if exists(outPath):
        rmtree(outPath)
    _mkDir(outPath)

    log.info(f"set Directory: {outPath}")
    # enumerate extract files
    fileArr = listdir(path)
    for name in fileArr:
        # read files content
        fpath = pjoin(path, name)
        scode = open(fpath, "rb").read()
        log.info(f"open file: {name}")
        # if file suffix is *.js then decryption file
        if isfile(fpath) and name.endswith(".js"):
            scode = decScript(scode, prettify)
        else:
            log.debug(f"skip file: {name}")
        # save content {outPath}/{name}
        open(pjoin(outPath, name), "wb").write(scode)
        log.success(f"decrypt and save file: {name}")

    rmtree(path)
    log.debug("remove temp dir")


def encScript(_code: bytes, compress):
    if compress:
        _code = jsmin(_code.decode(), quote_chars="'\"`").encode()
    aesIv = AES_IV
    cipherText = _code
    ins = AES.new(key=AES_KEY, iv=aesIv, mode=AES.MODE_CBC)
    enc = ins.encrypt(pad(cipherText, 16, 'pkcs7'))
    lCode = b64encode(enc)
    return lCode


def packWenc(path, outPath, compress):
    """
    :param path: out dir
    :param outPath: pack path app.asar
    :param compress: Bool
    :return: None
    """
    # check out path
    if isfile(outPath):
        log.error("plz input Directory for app.asar")
        raise NotADirectoryError

    _mkDir(outPath)

    encFilePath = pjoin(psplit(outPath)[0], "typoraCrackerTemp")
    if exists(encFilePath):
        rmtree(encFilePath)
    _mkDir(encFilePath)

    outFilePath = pjoin(outPath, "app.asar")
    log.info(f"set outFilePath: {outFilePath}")
    fileArr = listdir(path)

    for name in fileArr:
        fpath = pjoin(path, name)
        if isdir(fpath):
            log.error("TODO: found folder")
            raise IsADirectoryError

        scode = open(fpath, "rb").read()
        log.info(f"open file: {name}")
        if isfile(fpath) and name.endswith(".js"):
            scode = encScript(scode, compress)

        open(pjoin(encFilePath, name), "wb").write(scode)
        log.success(f"encrypt and save file: {name}")

    log.info("ready to pack")
    pack_asar(encFilePath, outFilePath)
    log.success("pack done")

    rmtree(encFilePath)
    log.debug("remove temp dir")


def main():
    argParser = ArgumentParser(
        description="[extract and decryption / pack and encryption] app.asar file from [Typora].",
        epilog="If you have any questions, please contact [ MasonShi@88.com ]")
    argParser.add_argument("asarPath", type=str, help="app.asar file path/dir [input/ouput]")
    argParser.add_argument("dirPath", type=str, help="as tmp and out directory.")

    argParser.add_argument('-u', dest='mode', action='store_const',
                           const=packWenc, default=extractWdec,
                           help='pack & encryption (default: extract & decryption)')
    argParser.add_argument('-f', dest='format', action='store_const',
                           const=True, default=False,
                           help='enabled prettify/compress (default: disabled)')
    args = argParser.parse_args()

    args.mode(args.asarPath, args.dirPath, args.format)
    log.success("Done!")


if __name__ == '__main__':
    main()