Implemented GitHub API

This commit is contained in:
iganeshk
2020-01-19 03:07:17 -05:00
parent 146266bd24
commit 2a8ea5219b
2 changed files with 151 additions and 140 deletions


@@ -1,13 +1,14 @@
<img alt="Standard-Notes" src="https://standardnotes.org/assets/icon.png"/>
## Standard Notes Extensions - Self-Hosted Repository
Host Standard Notes extensions on your own server. This utility parses most of the open-source extensions available from original repository as well as other authors and builds a extensions repository which can be plugged directly into Standard Notes Web/Desktop Clients. (https://standardnotes.org/)
Host Standard Notes extensions on your own server. This utility parses most of the open-source extensions available from the original repository, as well as from other authors, and builds an extensions repository that can be plugged directly into the Standard Notes Web/Desktop clients. (https://standardnotes.org/)
Extensions are listed as YAML files in the `/extensions` sub-directory; open a pull request if you'd like to add yours.
### Requirements
* Python 3
* Python 3 - pyyaml module
* pyyaml module
* requests module
### Usage
@@ -19,26 +20,42 @@ $ cd standardnotes-extensions
$ pip3 install -r requirements.txt
```
* Replace `your-domain.com` at the end of the `build-repo.py` file with your domain name:
* Use `env.sample` to create a `.env` file for your environment variables; the utility loads it automatically on startup.
```
main(os.getenv('URL', 'https://your-domain.com/extensions'))
# Sample ENV setup Variables (YAML)
# Copy this file and update as needed.
#
# $ cp env.sample .env
#
# Do not include this new file in source control
# Github Credentials
# Generate your token here: https://github.com/settings/tokens
# No additional permissions are required; this only avoids GitHub API rate limits
#
domain: https://your-domain.com/extensions
github:
username: USERNAME
token: TOKEN
```
* [Optional] Make additions or appropriate changes in `/extensions` directory
* [Optional] Make additions or appropriate changes in `/extensions` directory.
* Run the utility:
```bash
$ python3 build-repo.py
```
* Server the `/public` directory and verify if the endpoint is reachable
* Serve the `/public` directory and verify that the endpoint is reachable (a quick check is sketched after this list).
```
https://your-domain.com/extensions/index.json
```
* Import the above endpoint into the web/desktop client.
* Import the above endpoint into the web/desktop client. (Note: enable CORS on your web server accordingly; an nginx setup is provided below.)
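A quick way to sanity-check the served endpoint (a minimal sketch; the URL is the placeholder from above):

```python
import requests

# Fetch the generated repository index and confirm it parses as JSON
index = requests.get("https://your-domain.com/extensions/index.json", timeout=10)
index.raise_for_status()
print(sorted(index.json().keys()))  # top-level keys of the generated repo index
```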
### Setup with nginx as reverse-proxy
### Setup with nginx
```nginx
location ^~ /extensions {
@@ -80,4 +97,3 @@ https://your-domain.com/extensions/index.json
* Dracula Theme by https://github.com/cameronldn
### ToDo
* Implement the usage of GitHub API for efficiency.


@@ -11,18 +11,18 @@ public/
| | |-... <- other files
|-index.json <- repo info, contains all extensions' info
'''
# from subprocess import run, PIPE
from zipfile import ZipFile
import json
from subprocess import run, PIPE
import sys
import os
import json
import shutil
from zipfile import ZipFile
import requests
import yaml
def get_environment(base_dir):
"""
Load .env file if present
Parse the environment variables from .env
"""
temp_envvar = yaml.load("""
domain: https://domain.com/extensions
@@ -33,24 +33,18 @@ def get_environment(base_dir):
if os.path.isfile(os.path.join(base_dir, ".env")):
with open(os.path.join(base_dir, ".env")) as temp_env_file:
temp_envvar = yaml.load(temp_env_file, Loader=yaml.FullLoader)
return temp_envvar
# Environment file missing
print("Please set your environment file (read env.sample)")
print("You might be rate limited while parsing extensions from Github, if you continue!")
input("Press any key to continue: ")
return temp_envvar
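# Sketch of the shape get_environment() returns for the sample .env shown in
# the README (safe_load is used here only to keep the illustration self-contained):
#
#   >>> yaml.safe_load(open('.env'))
#   {'domain': 'https://your-domain.com/extensions',
#    'github': {'username': 'USERNAME', 'token': 'TOKEN'}}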
def process_zipball(repo_dir, release_version):
"""
Get release zipball and extract archive without the root directory
Grab the release zipball and extract it without the root/parent/top directory
"""
with ZipFile(os.path.join(repo_dir, release_version) + ".zip", 'r') as zipball:
for member in zipball.namelist():
# Parse files without root directory
# Parse files list excluding the top/parent/root directory
filename = '/'.join(member.split('/')[1:])
# Ignore the parent folder
# Now ignore it
if filename == '': continue
# Ignore dot files
if filename.startswith('.'): continue
@@ -68,10 +62,51 @@ def process_zipball(repo_dir, release_version):
os.remove(os.path.join(repo_dir, release_version) + ".zip")
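# A self-contained sketch of the strip-the-root-directory extraction that
# process_zipball() above performs (illustrative only, never called by the
# build; zip_path and dest_dir are placeholder names):
def _sketch_extract_without_root(zip_path, dest_dir):
    with ZipFile(zip_path) as zipball:
        for member in zipball.namelist():
            # Drop the single top-level directory GitHub adds to its zipballs
            relative = '/'.join(member.split('/')[1:])
            if not relative or relative.startswith('.'):
                continue  # skip the bare root entry and dot files
            target = os.path.join(dest_dir, relative)
            if member.endswith('/'):
                os.makedirs(target, exist_ok=True)
            else:
                os.makedirs(os.path.dirname(target) or '.', exist_ok=True)
                with zipball.open(member) as src, open(target, 'wb') as dst:
                    dst.write(src.read())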
def git_clone_method(ext_yaml, public_dir, ext_has_update):
"""
Clone the repository and parse it for release metadata
"""
repo_name = ext_yaml['github'].split('/')[-1]
repo_dir = os.path.join(public_dir, repo_name)
run([
'git', 'clone', 'https://github.com/{github}.git'.format(**ext_yaml),
'--quiet', '{}_tmp'.format(repo_name)
],
check=True)
ext_last_commit = (run([
'git', '--git-dir=' +
os.path.join(public_dir, '{}_tmp'.format(repo_name), '.git'),
'rev-list', '--tags', '--max-count=1'], stdout=PIPE, check=True).stdout.decode('utf-8').replace("\n", ""))
ext_version = run([
'git', '--git-dir',
os.path.join(public_dir, '{}_tmp'.format(repo_name), '.git'),
'describe', '--tags', ext_last_commit], stdout=PIPE, check=True).stdout.decode('utf-8').replace("\n", "")
# Check if the latest version already exists
if not os.path.exists(
os.path.join(repo_dir, ext_version)):
ext_has_update = True
shutil.move(
os.path.join(public_dir, '{}_tmp'.format(repo_name)),
os.path.join(public_dir, repo_name,
'{}'.format(ext_version)))
# Delete .git resource from the directory
shutil.rmtree(
os.path.join(public_dir, repo_name,
'{}'.format(ext_version), '.git'))
else:
# ext already up-to-date
# print('Extension: {} - {} (already up-to-date)'.format(ext_yaml['name'], ext_version))
# clean-up
shutil.rmtree(os.path.join(public_dir, '{}_tmp'.format(repo_name)))
return ext_version, ext_has_update
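# The API-based path in parse_extensions() below boils down to one call to
# GitHub's "latest release" endpoint; a minimal self-contained sketch
# (illustrative only, never called by the build; owner_repo, username and
# token are placeholders):
def _sketch_latest_release(owner_repo, username=None, token=None):
    session = requests.Session()
    if username and token:
        session.auth = (username, token)  # optional; only raises the rate limit
    release = session.get(
        'https://api.github.com/repos/{}/releases/latest'.format(owner_repo)).json()
    # The build uses 'tag_name' as the version and downloads 'zipball_url'
    return release['tag_name'], release['zipball_url']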
def parse_extensions(base_dir, base_url, ghub_session):
"""
Build the Standard Notes extensions repository using GitHub metadata
"""
extension_dir = os.path.join(base_dir, 'extensions')
public_dir = os.path.join(base_dir, 'public')
if not os.path.exists(os.path.join(public_dir)):
@@ -79,125 +114,80 @@ def parse_extensions(base_dir, base_url, ghub_session):
os.chdir(public_dir)
extensions = []
# Read and parse all extension info
for extfiles in os.listdir(extension_dir):
if not extfiles.endswith('.yaml'):
continue
with open(os.path.join(extension_dir, extfiles)) as extyaml:
ext = yaml.load(extyaml, Loader=yaml.FullLoader)
ext_yaml = yaml.load(extyaml, Loader=yaml.FullLoader)
# Get extension Github meta-data
ext_git_info = json.loads(ghub_session.get('https://api.github.com/repos/{github}/releases/latest'.format(**ext)).text)
repo_name = ext['github'].split('/')[-1]
ext_has_update = False
repo_name = ext_yaml['github'].split('/')[-1]
repo_dir = os.path.join(public_dir, repo_name)
# Check if the extension directory already exists
if not os.path.exists(repo_dir):
os.makedirs(repo_dir)
# Check if the extension with the current release already exists
if not os.path.exists(os.path.join(repo_dir, ext_git_info['tag_name'])):
os.makedirs(os.path.join(repo_dir, ext_git_info['tag_name']))
# Grab the release and then unpack it
with requests.get(ext_git_info['zipball_url'], stream=True) as r:
with open(os.path.join(repo_dir, ext_git_info['tag_name']) + ".zip", 'wb') as f:
shutil.copyfileobj(r.raw, f)
# unpack the zipball
process_zipball(repo_dir, ext_git_info['tag_name'])
# Build extension info
# https://example.com/sub-domain/my-extension/version/index.html
extension_url = '/'.join([base_url, repo_name, ext_git_info['tag_name'], ext['main']])
# https://example.com/sub-domain/my-extension/index.json
extension_info_url = '/'.join([base_url, repo_name, 'index.json'])
extension = dict(
identifier=ext['id'],
name=ext['name'],
content_type=ext['content_type'],
area=ext.get('area', None),
version=ext_git_info['tag_name'],
description=ext.get('description', None),
marketing_url=ext.get('marketing_url', None),
thumbnail_url=ext.get('thumbnail_url', None),
valid_until='2030-05-16T18:35:33.000Z',
url=extension_url,
download_url='https://github.com/{github}/archive/{version}.zip'.
format(**ext),
latest_url=extension_info_url,
flags=ext.get('flags', []),
dock_icon=ext.get('dock_icon', {}),
layerable=ext.get('layerable', None),
statusBar=ext.get('statusBar', None),
)
# If we don't have a GitHub API session, fall back to cloning repos instead
if ghub_session is not None:
# Github API Method
# Get extension Github meta-data
ext_git_info = json.loads(ghub_session.get('https://api.github.com/repos/{github}/releases/latest'.format(**ext_yaml)).text)
ext_version = ext_git_info['tag_name']
# Check if the extension directory already exists
if not os.path.exists(repo_dir):
os.makedirs(repo_dir)
# Check if the extension with the current release already exists
if not os.path.exists(os.path.join(repo_dir, ext_version)):
ext_has_update = True
os.makedirs(os.path.join(repo_dir, ext_version))
# Grab the release and then unpack it
with requests.get(ext_git_info['zipball_url'], stream=True) as zipball_stream:
with open(os.path.join(repo_dir, ext_version) + ".zip", 'wb') as zipball_file:
shutil.copyfileobj(zipball_stream.raw, zipball_file)
# unpack the zipball
process_zipball(repo_dir, ext_version)
else:
ext_version, ext_has_update = git_clone_method(ext_yaml, public_dir, ext_has_update)
# Strip empty values
extension = {k: v for k, v in extension.items() if v}
# Build extension info (stateless)
# https://domain.com/sub-domain/my-extension/index.json
extension = dict(
identifier=ext_yaml['id'],
name=ext_yaml['name'],
content_type=ext_yaml['content_type'],
area=ext_yaml.get('area', None),
version=ext_version,
description=ext_yaml.get('description', None),
marketing_url=ext_yaml.get('marketing_url', None),
thumbnail_url=ext_yaml.get('thumbnail_url', None),
valid_until='2030-05-16T18:35:33.000Z',
url='/'.join([base_url, repo_name, ext_version, ext_yaml['main']]),
download_url='https://github.com/{}/archive/{}.zip'.
format(ext_yaml['github'], ext_version),
latest_url='/'.join([base_url, repo_name, 'index.json']),
flags=ext_yaml.get('flags', []),
dock_icon=ext_yaml.get('dock_icon', {}),
layerable=ext_yaml.get('layerable', None),
statusBar=ext_yaml.get('statusBar', None),
)
""" To-be deprecated Method
# Get the latest repository and parse for latest version
# TO-DO: Implement usage of Github API for efficiency
# Strip empty values
extension = {k: v for k, v in extension.items() if v}
run([
'git', 'clone', 'https://github.com/{github}.git'.format(**ext),
'--quiet', '{}_temp'.format(repo_name)
],
check=True)
ext_latest = (run([
'git', '--git-dir=' +
os.path.join(public_dir, '{}_temp'.format(repo_name), '.git'),
'rev-list', '--tags', '--max-count=1'
],
stdout=PIPE,
check=True).stdout.decode('utf-8').replace("\n", ""))
ext_latest_version = run([
'git', '--git-dir',
os.path.join(public_dir, '{}_temp'.format(repo_name), '.git'),
'describe', '--tags', ext_latest
],
stdout=PIPE,
check=True).stdout.decode('utf-8').replace(
"\n", "")
# Tag the latest releases
extension['version'] = ext_latest_version
extension['url'] = '/'.join([
base_url, repo_name, '{}'.format(ext_latest_version), ext['main']
])
extension['download_url'] = (
'https://github.com/{}/archive/{}.zip'.format(
ext['github'], ext_latest_version))
# check if latest version already exists
if not os.path.exists(
os.path.join(public_dir, repo_name,
'{}'.format(ext_latest_version))):
shutil.move(
os.path.join(public_dir, '{}_temp'.format(repo_name)),
os.path.join(public_dir, repo_name,
'{}'.format(ext_latest_version)))
# Delete .git resource from the directory
shutil.rmtree(
os.path.join(public_dir, repo_name,
'{}'.format(ext_latest_version), '.git'))
else:
# clean-up
shutil.rmtree(os.path.join(public_dir,
'{}_temp'.format(repo_name)))
"""
# Check whether the extension picked up an update
if ext_has_update:
# Generate JSON file for each extension
with open(os.path.join(public_dir, repo_name, 'index.json'),
'w') as ext_json:
json.dump(extension, ext_json, indent=4)
print('Extension: {:30s} {:6s}\t(updated)'.format(ext_yaml['name'], ext_version))
else:
# ext already up-to-date
print('Extension: {:30s} {:6s}\t(already up-to-date)'.format(ext_yaml['name'], ext_version))
extensions.append(extension)
print('Loaded extension: {} - {}'.format(ext['name'],
ext_git_info['tag_name']))
extensions.append(extension)
os.chdir('..')
# Generate the index JSON file
# Generate the main repository index JSON
with open(os.path.join(public_dir, 'index.json'), 'w') as ext_json:
json.dump(
dict(
@@ -209,9 +199,6 @@ def parse_extensions(base_dir, base_url, ghub_session):
indent=4,
)
# Terminate Session
ghub_session.close()
def main():
"""
@@ -224,20 +211,28 @@ def main():
while base_url.endswith('/'):
base_url = base_url[:-1]
# Get a re-usable session object using user credentials
ghub_session = requests.Session()
ghub_session.auth = (env_var['github']['username'], env_var['github']['token'])
try:
ghub_verify = ghub_session.get("https://api.github.com/")
if not ghub_verify.headers['status'] == "200 OK":
print("Error: %s " % ghub_verify.headers['status'])
print("Bad Github credentials in the .env file, check and try again.")
exit(1)
except Exception as e:
print("Error %s" % e)
# Build extensions
parse_extensions(base_dir, base_url, ghub_session)
if (env_var['github']['username'] and env_var['github']['token']):
# Get a re-usable session object using user credentials
ghub_session = requests.Session()
ghub_session.auth = (env_var['github']['username'], env_var['github']['token'])
try:
ghub_verify = ghub_session.get("https://api.github.com/")
if ghub_verify.headers['status'] != "200 OK":
print("Error: %s " % ghub_verify.headers['status'])
print("Bad Github credentials in the .env file, check and try again.")
sys.exit(1)
except Exception as e:
print("Unknown error occured: %s" % e)
# Build extensions
parse_extensions(base_dir, base_url, ghub_session)
# Terminate Session
ghub_session.close()
else:
# Environment file missing
print("Environment not set (read env.sample)")
input("⚠️ This method is set to be deprecated soon, Press any key to continue:\n")
parse_extensions(base_dir, base_url, None)
sys.exit(0)
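# A sketch of an alternative credential check using the status code that
# requests always exposes (the check above keys off GitHub's 'status'
# response header instead):
#
#     if ghub_verify.status_code != 200:
#         print("Bad Github credentials in the .env file, check and try again.")
#         sys.exit(1)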
if __name__ == '__main__':
# Entry point
main()