Merge pull request #550 from EstrellaXD/3.1-dev

3.1.8
This commit is contained in:
Estrella Pan
2023-10-08 19:59:36 +08:00
committed by GitHub
11 changed files with 178 additions and 44 deletions

View File

@@ -1,4 +1,5 @@
from os.path import expandvars
from typing import Literal
from pydantic import BaseModel, Field
@@ -88,7 +89,18 @@ class ExperimentalOpenAI(BaseModel):
api_base: str = Field(
"https://api.openai.com/v1", description="OpenAI api base url"
)
model: str = Field("gpt-3.5-turbo", description="OpenAI model")
api_type: Literal["azure", "openai"] = Field(
"openai", description="OpenAI api type, usually for azure"
)
api_version: str = Field(
"2023-05-15", description="OpenAI api version, only for Azure"
)
model: str = Field(
"gpt-3.5-turbo", description="OpenAI model, ignored when api type is azure"
)
deployment_id: str = Field(
"", description="Azure OpenAI deployment id, ignored when api type is openai"
)
class Config(BaseModel):

View File

@@ -87,11 +87,20 @@ class RequestURL:
self.session = requests.Session()
if settings.proxy.enable:
if "http" in settings.proxy.type:
url = f"{settings.proxy.type}://{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = {
"https": url,
"http": url,
}
if settings.proxy.username:
username=settings.proxy.username
password=settings.proxy.password
url = f"http://{username}:{password}@{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = {
"http": url,
"https": url,
}
else:
url = f"http://{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = {
"http": url,
"https": url,
}
elif settings.proxy.type == "socks5":
self._socks5_proxy = True
socks.set_default_proxy(

View File

@@ -4,8 +4,13 @@ def rss_parser(soup):
torrent_homepage = []
for item in soup.findall("./channel/item"):
torrent_titles.append(item.find("title").text)
torrent_urls.append(item.find("enclosure").attrib["url"])
torrent_homepage.append(item.find("link").text)
enclosure = item.find("enclosure")
if enclosure is not None:
torrent_homepage.append(item.find("link").text)
torrent_urls.append(enclosure.attrib.get("url"))
else:
torrent_urls.append(item.find("link").text)
torrent_homepage.append("")
return torrent_titles, torrent_urls, torrent_homepage

View File

@@ -1,6 +1,7 @@
import asyncio
import json
import logging
from concurrent.futures import ThreadPoolExecutor
from typing import Any
import openai
@@ -98,25 +99,13 @@ class OpenAIParser:
if not prompt:
prompt = DEFAULT_PROMPT
async def complete() -> str:
resp = await openai.ChatCompletion.acreate(
api_key=self._api_key,
api_base=self.api_base,
model=self.model,
messages=[
dict(role="system", content=prompt),
dict(role="user", content=text),
],
# set temperature to 0 to make results be more stable and reproducible.
temperature=0,
**self.openai_kwargs,
)
params = self._prepare_params(text, prompt)
with ThreadPoolExecutor(max_workers=1) as worker:
future = worker.submit(openai.ChatCompletion.create, **params)
resp = future.result()
result = resp["choices"][0]["message"]["content"]
return result
loop = asyncio.get_event_loop()
result = loop.run_until_complete(complete())
if asdict:
try:
@@ -127,3 +116,36 @@ class OpenAIParser:
logger.debug(f"the parsed result is: {result}")
return result
def _prepare_params(self, text: str, prompt: str) -> dict[str, Any]:
"""_prepare_params is a helper function to prepare params for openai library.
There are some differences between openai and azure openai api, so we need to
prepare params for them.
Args:
text (str): the text to be parsed
prompt (str): the custom prompt
Returns:
dict[str, Any]: the prepared key value pairs.
"""
params = dict(
api_key=self._api_key,
api_base=self.api_base,
messages=[
dict(role="system", content=prompt),
dict(role="user", content=text),
],
# set temperature to 0 to make results be more stable and reproducible.
temperature=0,
)
api_type = self.openai_kwargs.get("api_type", "openai")
if api_type == "azure":
params["deployment_id"] = self.openai_kwargs.get("deployment_id", "")
params["api_version"] = self.openai_kwargs.get("api_version", "2023-05-15")
params["api_type"] = "azure"
else:
params["model"] = self.model
return params

View File

@@ -21,8 +21,9 @@ BangumiJSON: TypeAlias = str
class SearchTorrent(RequestContent, RSSAnalyser):
def search_torrents(self, rss_item: RSSItem) -> list[Torrent]:
    """Fetch the torrents listed by the given RSS item.

    The diff residue left an unreachable statement after the return and
    commented-out duplicates of the same call; only a single fetch is
    needed.

    Args:
        rss_item: the feed whose ``url`` is downloaded and parsed.

    Returns:
        list[Torrent]: torrents parsed from the feed.
    """
    return self.get_torrents(rss_item.url)
def analyse_keyword(
self, keywords: list[str], site: str = "mikan", limit: int = 5

View File

@@ -1,19 +1,57 @@
import json
import os
from unittest import mock
from dotenv import load_dotenv
from module.parser.analyser.openai import OpenAIParser
load_dotenv()
from module.parser.analyser.openai import DEFAULT_PROMPT, OpenAIParser
class TestOpenAIParser:
@classmethod
def setup_class(cls):
    """Create one parser shared by all tests in this class.

    A dummy api key is sufficient: these tests only build request
    parameters and never call the OpenAI API. (The previous
    ``os.getenv("OPENAI_API_KEY")`` read was a dead store — its value
    was immediately overwritten — so it is removed.)
    """
    cls.parser = OpenAIParser(api_key="testing!")
def test__prepare_params_with_openai(self):
    """A non-azure config must include ``model`` and no azure-only keys."""
    sample = "hello world"
    params = self.parser._prepare_params(sample, DEFAULT_PROMPT)
    assert params == dict(
        api_key=self.parser._api_key,
        api_base=self.parser.api_base,
        messages=[
            dict(role="system", content=DEFAULT_PROMPT),
            dict(role="user", content=sample),
        ],
        temperature=0,
        model=self.parser.model,
    )
def test__prepare_params_with_azure(self):
    """An azure config must carry deployment/version keys instead of ``model``."""
    parser = OpenAIParser(
        api_key="aaabbbcc",
        api_base="https://test.openai.azure.com/",
        api_type="azure",
        api_version="2023-05-15",
        deployment_id="gpt-35-turbo",
    )
    sample = "hello world"
    expected = dict(
        api_key=parser._api_key,
        api_base=parser.api_base,
        messages=[
            dict(role="system", content=DEFAULT_PROMPT),
            dict(role="user", content=sample),
        ],
        temperature=0,
        deployment_id="gpt-35-turbo",
        api_version="2023-05-15",
        api_type="azure",
    )
    assert parser._prepare_params(sample, DEFAULT_PROMPT) == expected
def test_parse(self):
text = "[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"
expected = {

View File

@@ -1,6 +1,3 @@
import json
import os
import pytest
from module.conf import settings
from module.parser.title_parser import TitleParser

View File

@@ -1,20 +1,29 @@
<script lang="ts" setup>
import { Caution } from '@icon-park/vue-next';
import type { SettingItem } from '#/components';
import type { ExperimentalOpenAI, OpenAIModel } from '#/config';
import type { ExperimentalOpenAI, OpenAIModel, OpenAIType } from '#/config';
const { t } = useMyI18n();
const { getSettingGroup } = useConfigStore();
const openAI = getSettingGroup('experimental_openai');
const openAIModels: OpenAIModel = ['gpt-3.5-turbo'];
const openAITypes: OpenAIType = ['openai', 'azure'];
const items: SettingItem<ExperimentalOpenAI>[] = [
const sharedItems: SettingItem<ExperimentalOpenAI>[] = [
{
configKey: 'enable',
label: () => t('config.experimental_openai_set.enable'),
type: 'switch',
},
{
configKey: 'api_type',
label: () => t('config.experimental_openai_set.api_type'),
type: 'select',
prop: {
items: openAITypes,
},
},
{
configKey: 'api_key',
label: () => t('config.experimental_openai_set.api_key'),
@@ -33,6 +42,9 @@ const items: SettingItem<ExperimentalOpenAI>[] = [
placeholder: 'OpenAI API Base URL',
},
},
];
const openAIItems: SettingItem<ExperimentalOpenAI>[] = [
...sharedItems,
{
configKey: 'model',
label: () => t('config.experimental_openai_set.model'),
@@ -42,6 +54,28 @@ const items: SettingItem<ExperimentalOpenAI>[] = [
},
},
];
const azureItems: SettingItem<ExperimentalOpenAI>[] = [
...sharedItems,
{
configKey: 'api_version',
label: () => t('config.experimental_openai_set.api_version'),
type: 'input',
prop: {
type: 'text',
placeholder: 'e.g: 2023-05-15',
},
},
{
configKey: 'deployment_id',
label: () => t('config.experimental_openai_set.deployment_id'),
type: 'input',
prop: {
type: 'text',
placeholder: 'e.g: gpt-35-turbo',
},
},
];
</script>
<template>
@@ -53,7 +87,7 @@ const items: SettingItem<ExperimentalOpenAI>[] = [
<div space-y-12px>
<ab-setting
v-for="i in items"
v-for="i in openAI.api_type === 'azure' ? azureItems : openAIItems"
:key="i.configKey"
v-bind="i"
v-model:data="openAI[i.configKey]"

View File

@@ -142,8 +142,11 @@
"warning": "Warning: Experimental feature is not yet stable. Please use with caution.",
"enable": "Enable OpenAI",
"api_key": "OpenAI API Key",
"api_base": "OpenAI API Base URL (Azure entrypoint)",
"model": "OpenAI Model",
"api_type": "OpenAI API Type",
"api_version": "Azure OpenAI Version",
"deployment_id": "Azure OpenAI Deployment ID"
},
"media_player_set": {
"title": "Media Player Setting",

View File

@@ -142,8 +142,11 @@
"warning": "警告:实验功能尚未稳定,请谨慎使用",
"enable": "启用 OpenAI",
"api_key": "OpenAI API Key",
"api_base": "OpenAI API Base URL (Azure entrypoint)",
"model": "OpenAI 模型",
"api_type": "OpenAI API 类型",
"api_version": "Azure OpenAI 版本",
"deployment_id": "Azure OpenAI Deployment ID"
},
"media_player_set": {
"title": "播放器设置",

View File

@@ -52,6 +52,10 @@ export interface Config {
api_key: string;
api_base: string;
model: 'gpt-3.5-turbo';
// azure
api_type: 'openai' | 'azure';
api_version?: string;
deployment_id?: string;
};
}
@@ -107,6 +111,10 @@ export const initConfig: Config = {
api_key: '',
api_base: 'https://api.openai.com/v1/',
model: 'gpt-3.5-turbo',
// azure
api_type: 'openai',
api_version: '2020-05-03',
deployment_id: '',
},
};
@@ -137,3 +145,5 @@ export type ProxyType = UnionToTuple<Proxy['type']>;
export type NotificationType = UnionToTuple<Notification['type']>;
/** OpenAI Model List */
export type OpenAIModel = UnionToTuple<ExperimentalOpenAI['model']>;
/** OpenAI API Type */
export type OpenAIType = UnionToTuple<ExperimentalOpenAI['api_type']>;