Mirror of https://github.com/jie65535/JChatGPT.git (synced 2025-07-28 18:59:20 +08:00)

Compare commits: 6c034ab2a7 ... f822999ab4 (3 commits)
Commits:
  f822999ab4
  3c4373e1ff
  89794b587e
@@ -7,7 +7,7 @@ plugins {
 }
 
 group = "top.jie65535.mirai"
-version = "1.5.0"
+version = "1.7.0"
 
 mirai {
     jvmTarget = JavaVersion.VERSION_11
File diff suppressed because it is too large
@@ -23,7 +23,7 @@ object LaTeXConverter {
         g2.color = Color.white
         g2.fillRect(0, 0, icon.iconWidth, icon.iconHeight)
         val jl = JLabel()
-        jl.setForeground(Color(0, 0, 0))
+        jl.setForeground(Color.black)
         icon.paintIcon(jl, g2, 0, 0)
         val stream = ByteArrayOutputStream()
         ImageIO.write(image, format, stream)
src/main/kotlin/LargeLanguageModels.kt (new file, 40 lines)
@@ -0,0 +1,40 @@
+package top.jie65535.mirai
+
+import com.aallam.openai.api.http.Timeout
+import com.aallam.openai.client.Chat
+import com.aallam.openai.client.OpenAI
+import com.aallam.openai.client.OpenAIHost
+import kotlin.time.Duration.Companion.milliseconds
+
+object LargeLanguageModels {
+    var chat: Chat? = null
+    var reasoning: Chat? = null
+    var visual: Chat? = null
+
+    fun reload() {
+        val timeout = PluginConfig.timeout.milliseconds
+        if (PluginConfig.openAiApi.isNotBlank() && PluginConfig.openAiToken.isNotBlank()) {
+            chat = OpenAI(
+                token = PluginConfig.openAiToken,
+                host = OpenAIHost(baseUrl = PluginConfig.openAiApi),
+                timeout = Timeout(request = timeout, connect = timeout, socket = timeout)
+            )
+        }
+
+        if (PluginConfig.reasoningModelApi.isNotBlank() && PluginConfig.reasoningModelToken.isNotBlank()) {
+            reasoning = OpenAI(
+                token = PluginConfig.reasoningModelToken,
+                host = OpenAIHost(baseUrl = PluginConfig.reasoningModelApi),
+                timeout = Timeout(request = timeout, connect = timeout, socket = timeout)
+            )
+        }
+
+        if (PluginConfig.visualModelApi.isNotBlank() && PluginConfig.visualModelToken.isNotBlank()) {
+            visual = OpenAI(
+                token = PluginConfig.visualModelToken,
+                host = OpenAIHost(baseUrl = PluginConfig.visualModelApi),
+                timeout = Timeout(request = timeout, connect = timeout, socket = timeout)
+            )
+        }
+    }
+}
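The three clients are now resolved at call time instead of being injected into each tool. A minimal sketch of the consuming side, mirroring how ReasoningAgent uses the shared client later in this changeset (the helper function itself is illustrative, not part of the commit):

package top.jie65535.mirai

import com.aallam.openai.api.chat.ChatCompletionRequest
import com.aallam.openai.api.chat.ChatMessage
import com.aallam.openai.api.model.ModelId

// Illustrative helper, not part of this commit: resolve the shared reasoning
// client when needed and fall back gracefully if it was never configured.
suspend fun askReasoningModel(prompt: String): String {
    val llm = LargeLanguageModels.reasoning ?: return "未配置llm,无法进行推理。"
    val answer = StringBuilder()
    llm.chatCompletions(
        ChatCompletionRequest(
            model = ModelId(PluginConfig.reasoningModel),
            messages = listOf(ChatMessage.User(prompt))
        )
    ).collect { chunk ->
        // Streaming response: append each non-empty content delta.
        val delta = chunk.choices.firstOrNull()?.delta ?: return@collect
        if (!delta.content.isNullOrEmpty()) answer.append(delta.content)
    }
    return answer.toString()
}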
@@ -18,7 +18,7 @@ object PluginCommands : CompositeCommand(
     suspend fun CommandSender.setToken(token: String) {
         PluginConfig.openAiToken = token
         PluginConfig.save()
-        JChatGPT.updateOpenAiToken(token)
+        LargeLanguageModels.reload()
         sendMessage("OK")
     }
 
@@ -45,7 +45,7 @@ object PluginCommands : CompositeCommand(
     @SubCommand
     suspend fun CommandSender.reload() {
         PluginConfig.reload()
-        JChatGPT.updateOpenAiToken(PluginConfig.openAiToken)
+        LargeLanguageModels.reload()
         sendMessage("OK")
     }
 }
@@ -6,7 +6,7 @@ import net.mamoe.mirai.console.data.value
 
 object PluginConfig : AutoSavePluginConfig("Config") {
     @ValueDescription("OpenAI API base url")
-    val openAiApi: String by value("https://api.openai.com/v1/")
+    val openAiApi: String by value("https://dashscope.aliyuncs.com/compatible-mode/v1/")
 
     @ValueDescription("OpenAI API Token")
     var openAiToken: String by value("")
@@ -14,11 +14,32 @@ object PluginConfig : AutoSavePluginConfig("Config") {
     @ValueDescription("Chat模型")
     var chatModel: String by value("qwen-max")
 
+    @ValueDescription("推理模型API")
+    var reasoningModelApi: String by value("https://dashscope.aliyuncs.com/compatible-mode/v1/")
+
+    @ValueDescription("推理模型Token")
+    var reasoningModelToken: String by value("")
+
+    @ValueDescription("推理模型")
+    var reasoningModel: String by value("qwq-plus")
+
-    @ValueDescription("Chat默认提示")
-    var prompt: String by value("")
+    @ValueDescription("视觉模型API")
+    var visualModelApi: String by value("https://dashscope.aliyuncs.com/compatible-mode/v1/")
+
+    @ValueDescription("视觉模型Token")
+    var visualModelToken: String by value("")
+
+    @ValueDescription("视觉模型")
+    var visualModel: String by value("qwen-vl-plus")
+
+    @ValueDescription("Jina API Key")
+    val jinaApiKey by value("")
+
+    @ValueDescription("SearXNG 搜索引擎地址,如 http://127.0.0.1:8080/search 必须启用允许json格式返回")
+    val searXngUrl: String by value("")
+
+    @ValueDescription("在线运行代码 glot.io 的 api token,在官网注册账号即可获取。")
+    val glotToken: String by value("")
+
     @ValueDescription("群管理是否自动拥有对话权限,默认是")
     val groupOpHasChatPermission: Boolean by value(true)
@@ -26,17 +47,14 @@ object PluginConfig : AutoSavePluginConfig("Config") {
     @ValueDescription("好友是否自动拥有对话权限,默认是")
     val friendHasChatPermission: Boolean by value(true)
 
-    @ValueDescription("群荣誉等级权限门槛,达到这个等级相当于自动拥有权限。")
-    val temperaturePermission: Int by value(60)
+    @ValueDescription("群荣誉等级权限门槛,达到这个等级相当于自动拥有对话权限。")
+    val temperaturePermission: Int by value(50)
 
     @ValueDescription("等待响应超时时间,单位毫秒,默认60秒")
     val timeout: Long by value(60000L)
 
-    @ValueDescription("SearXNG 搜索引擎地址,如 http://127.0.0.1:8080/search 必须启用允许json格式返回")
-    val searXngUrl: String by value("")
-
-    @ValueDescription("在线运行代码 glot.io 的 api token,在官网注册账号即可获取。")
-    val glotToken: String by value("")
+    @ValueDescription("系统提示词")
+    var prompt: String by value("你是一个乐于助人的助手")
 
     @ValueDescription("创建Prompt时取最近多少分钟内的消息")
     val historyWindowMin: Int by value(10)
@@ -50,6 +68,9 @@ object PluginConfig : AutoSavePluginConfig("Config") {
     @ValueDescription("达到需要合并转发消息的阈值")
     val messageMergeThreshold by value(150)
 
-    @ValueDescription("最大重试次数,至少2次,最后一次请求不会带工具,非工具调用相当于正常回复")
-    val retryMax: Int by value(3)
+    @ValueDescription("最大循环次数,至少2次")
+    val retryMax: Int by value(5)
+
+    @ValueDescription("关键字呼叫,支持正则表达式")
+    val callKeyword by value("[小筱][林淋月玥]")
 }
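Several of the new keys are only declared here and consumed elsewhere in the plugin (the large JChatGPT.kt diff is suppressed above). A rough sketch of typical consumers, with illustrative helper names that are not part of this diff:

package top.jie65535.mirai

// Illustrative helpers only; the real call sites live in code outside this diff.
object ConfigUsageSketch {
    // callKeyword is documented as a regular expression ("关键字呼叫,支持正则表达式"),
    // so a keyword call is just a regex match against the incoming message text.
    fun isKeywordCall(messageText: String): Boolean =
        Regex(PluginConfig.callKeyword).containsMatchIn(messageText)

    // retryMax is described as the maximum number of loop rounds ("最大循环次数,至少2次").
    suspend fun runAgentLoop(step: suspend (round: Int) -> Boolean) {
        val maxRounds = PluginConfig.retryMax.coerceAtLeast(2)
        for (round in 1..maxRounds) {
            if (step(round)) break // stop once a final reply has been produced
        }
    }
}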
@@ -3,7 +3,12 @@ package top.jie65535.mirai.tools
 import com.aallam.openai.api.chat.Tool
 import io.ktor.client.*
 import io.ktor.client.engine.okhttp.*
+import io.ktor.client.plugins.HttpTimeout
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.SupervisorJob
 import kotlinx.serialization.json.JsonObject
+import net.mamoe.mirai.event.events.MessageEvent
 
 abstract class BaseAgent(
     val tool: Tool
@@ -18,11 +23,33 @@ abstract class BaseAgent(
      */
     open val loadingMessage: String = ""
 
+    /**
+     * HTTP客户端
+     */
     protected val httpClient by lazy {
-        HttpClient(OkHttp)
+        HttpClient(OkHttp) {
+            install(HttpTimeout) {
+                requestTimeoutMillis = 60000
+                connectTimeoutMillis = 5000
+                socketTimeoutMillis = 15000
+            }
+        }
     }
 
-    abstract suspend fun execute(args: JsonObject?): String
+    /**
+     * 协程作用域
+     */
+    protected val scope by lazy {
+        CoroutineScope(Dispatchers.IO + SupervisorJob())
+    }
+
+    open suspend fun execute(args: JsonObject?): String {
+        return "OK"
+    }
+
+    open suspend fun execute(args: JsonObject?, event: MessageEvent): String {
+        return execute(args)
+    }
 
     override fun toString(): String {
         return "${tool.function.name}: ${tool.function.description}"
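execute is no longer abstract and gains an event-aware overload, so a concrete agent overrides only the variant it needs, while the shared httpClient and scope come preconfigured. A minimal subclass sketch under the new base API (the class, tool name and description below are illustrative):

package top.jie65535.mirai.tools

import com.aallam.openai.api.chat.Tool
import com.aallam.openai.api.core.Parameters
import kotlinx.serialization.json.JsonObject
import net.mamoe.mirai.event.events.MessageEvent

// Illustrative agent: overrides only the event-aware overload, in the same
// style as the SendSingleMessageAgent added later in this changeset.
class EchoAgent : BaseAgent(
    tool = Tool.function(
        name = "echo",                 // hypothetical tool name
        description = "原样回显消息",    // hypothetical description
        parameters = Parameters.Empty
    )
) {
    override suspend fun execute(args: JsonObject?, event: MessageEvent): String {
        event.subject.sendMessage(event.message.contentToString())
        return "OK"
    }
}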
@@ -5,8 +5,8 @@ import com.aallam.openai.api.chat.ChatMessage
 import com.aallam.openai.api.chat.Tool
 import com.aallam.openai.api.core.Parameters
 import com.aallam.openai.api.model.ModelId
-import com.aallam.openai.client.Chat
 import kotlinx.serialization.json.*
+import top.jie65535.mirai.LargeLanguageModels
 import top.jie65535.mirai.PluginConfig
 
 class ReasoningAgent : BaseAgent(
@@ -18,32 +18,30 @@ class ReasoningAgent : BaseAgent(
             putJsonObject("properties") {
                 putJsonObject("prompt") {
                     put("type", "string")
-                    put("description", "用于调用推理模型的提示")
+                    put("description", "用于调用推理模型的提示词")
                 }
             }
             putJsonArray("required") {
-                add("question")
+                add("prompt")
             }
         },
     )
 ) {
-    var llm: Chat? = null
-
     override val loadingMessage: String
         get() = "深度思考中..."
 
     override val isEnabled: Boolean
-        get() = llm != null
+        get() = LargeLanguageModels.reasoning != null
 
     override suspend fun execute(args: JsonObject?): String {
         requireNotNull(args)
-        val llm = llm ?: return "未配置llm,无法进行推理。"
+        val llm = LargeLanguageModels.reasoning ?: return "未配置llm,无法进行推理。"
 
         val prompt = args.getValue("prompt").jsonPrimitive.content
         val answerContent = StringBuilder()
         llm.chatCompletions(ChatCompletionRequest(
             model = ModelId(PluginConfig.reasoningModel),
-            messages = listOf(ChatMessage.Companion.User(prompt))
+            messages = listOf(ChatMessage.User(prompt))
         )).collect {
             if (it.choices.isNotEmpty()) {
                 val delta = it.choices[0].delta ?: return@collect
src/main/kotlin/tools/SendCompositeMessage.kt (new file, 50 lines)
@@ -0,0 +1,50 @@
+package top.jie65535.mirai.tools
+
+import com.aallam.openai.api.chat.Tool
+import com.aallam.openai.api.core.Parameters
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.add
+import kotlinx.serialization.json.jsonPrimitive
+import kotlinx.serialization.json.put
+import kotlinx.serialization.json.putJsonArray
+import kotlinx.serialization.json.putJsonObject
+import net.mamoe.mirai.event.events.MessageEvent
+import net.mamoe.mirai.message.data.buildForwardMessage
+import top.jie65535.mirai.JChatGPT
+import top.jie65535.mirai.PluginConfig
+import kotlin.collections.getValue
+
+class SendCompositeMessage : BaseAgent(
+    tool = Tool.function(
+        name = "sendCompositeMessage",
+        description = "发送组合消息,适合发送较长消息而避免刷屏(不支持Markdown)",
+        parameters = Parameters.buildJsonObject {
+            put("type", "object")
+            putJsonObject("properties") {
+                putJsonObject("content") {
+                    put("type", "string")
+                    put("description", "消息内容")
+                }
+            }
+            putJsonArray("required") {
+                add("content")
+            }
+        }
+    )
+) {
+    override suspend fun execute(args: JsonObject?, event: MessageEvent): String {
+        requireNotNull(args)
+        val content = args.getValue("content").jsonPrimitive.content
+        val msg = JChatGPT.toMessage(event.subject, content)
+        event.subject.sendMessage(
+            if (content.length > PluginConfig.messageMergeThreshold) {
+                event.buildForwardMessage {
+                    event.bot says msg
+                }
+            } else {
+                msg
+            }
+        )
+        return "OK"
+    }
+}
src/main/kotlin/tools/SendSingleMessageAgent.kt (new file, 33 lines)
@@ -0,0 +1,33 @@
+package top.jie65535.mirai.tools
+
+import com.aallam.openai.api.chat.Tool
+import com.aallam.openai.api.core.Parameters
+import kotlinx.serialization.json.*
+import net.mamoe.mirai.event.events.MessageEvent
+import top.jie65535.mirai.JChatGPT
+
+class SendSingleMessageAgent : BaseAgent(
+    tool = Tool.function(
+        name = "sendSingleMessage",
+        description = "发送一条消息,适合发送一行以内的短句(不支持Markdown)",
+        parameters = Parameters.buildJsonObject {
+            put("type", "object")
+            putJsonObject("properties") {
+                putJsonObject("content") {
+                    put("type", "string")
+                    put("description", "消息内容")
+                }
+            }
+            putJsonArray("required") {
+                add("content")
+            }
+        }
+    )
+) {
+    override suspend fun execute(args: JsonObject?, event: MessageEvent): String {
+        requireNotNull(args)
+        val content = args.getValue("content").jsonPrimitive.content
+        event.subject.sendMessage(JChatGPT.toMessage(event.subject, content))
+        return "OK"
+    }
+}
src/main/kotlin/tools/StopLoopAgent.kt (new file, 12 lines)
@@ -0,0 +1,12 @@
+package top.jie65535.mirai.tools
+
+import com.aallam.openai.api.chat.Tool
+import com.aallam.openai.api.core.Parameters
+
+class StopLoopAgent : BaseAgent(
+    tool = Tool.function(
+        name = "endConversation",
+        description = "结束本轮对话",
+        parameters = Parameters.Empty
+    )
+)
src/main/kotlin/tools/VisitWeb.kt (new file, 71 lines)
@@ -0,0 +1,71 @@
+package top.jie65535.mirai.tools
+
+import com.aallam.openai.api.chat.Tool
+import com.aallam.openai.api.core.Parameters
+import io.ktor.client.request.*
+import io.ktor.client.statement.*
+import kotlinx.coroutines.async
+import kotlinx.coroutines.awaitAll
+import kotlinx.serialization.json.*
+import top.jie65535.mirai.PluginConfig
+
+class VisitWeb : BaseAgent(
+    tool = Tool.function(
+        name = "visit",
+        description = "Visit webpage(s) and return the summary of the content.",
+        parameters = Parameters.buildJsonObject {
+            put("type", "object")
+            putJsonObject("properties") {
+                putJsonObject("url") {
+                    putJsonArray("type") {
+                        add("string")
+                        add("array")
+                    }
+                    putJsonObject("items") {
+                        put("type", "string")
+                    }
+                    put("minItems", 1)
+                    put("description", "The URL(s) of the webpage(s) to visit. Can be a single URL or an array of URLs.")
+                }
+            }
+
+            putJsonArray("required") {
+                add("url")
+            }
+        }
+    )
+) {
+    companion object {
+        // Visit Tool (Using Jina Reader)
+        const val JINA_READER_URL_PREFIX = "https://r.jina.ai/"
+    }
+
+    override val isEnabled: Boolean
+        get() = PluginConfig.jinaApiKey.isNotEmpty()
+
+    override val loadingMessage: String
+        get() = "访问网页中..."
+
+    override suspend fun execute(args: JsonObject?): String {
+        requireNotNull(args)
+        val urlJson = args.getValue("url")
+        if (urlJson is JsonPrimitive) {
+            return jinaReadPage(urlJson.content)
+        } else if (urlJson is JsonArray) {
+            return urlJson.map {
+                scope.async { jinaReadPage(it.jsonPrimitive.content) }
+            }.awaitAll().joinToString()
+        }
+        return ""
+    }
+
+    private suspend fun jinaReadPage(url: String): String {
+        return try {
+            httpClient.get(JINA_READER_URL_PREFIX + url) {
+                header("Authorization", "Bearer ${PluginConfig.jinaApiKey}")
+            }.bodyAsText()
+        } catch (e: Throwable) {
+            "Error fetching \"$url\": ${e.message}"
+        }
+    }
+}
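The url parameter is declared as either a single string or an array of strings; the array case fans out with scope.async and joins the results. A small caller sketch (the demo function is illustrative, not part of the commit):

package top.jie65535.mirai.tools

import kotlinx.serialization.json.*

// Illustrative caller: builds tool arguments the way the model would.
suspend fun demoVisit(agent: VisitWeb) {
    val single = buildJsonObject { put("url", "https://example.com") }
    val batch = buildJsonObject {
        putJsonArray("url") {
            add("https://example.com")
            add("https://example.org")
        }
    }
    println(agent.execute(single)) // one page read through Jina Reader
    println(agent.execute(batch))  // pages fetched concurrently, results joined
}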
src/main/kotlin/tools/VisualAgent.kt (new file, 76 lines)
@@ -0,0 +1,76 @@
+package top.jie65535.mirai.tools
+
+import com.aallam.openai.api.chat.ChatCompletionRequest
+import com.aallam.openai.api.chat.ChatMessage
+import com.aallam.openai.api.chat.ImagePart
+import com.aallam.openai.api.chat.TextPart
+import com.aallam.openai.api.chat.Tool
+import com.aallam.openai.api.core.Parameters
+import com.aallam.openai.api.model.ModelId
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.add
+import kotlinx.serialization.json.jsonPrimitive
+import kotlinx.serialization.json.put
+import kotlinx.serialization.json.putJsonArray
+import kotlinx.serialization.json.putJsonObject
+import top.jie65535.mirai.LargeLanguageModels
+import top.jie65535.mirai.PluginConfig
+
+class VisualAgent : BaseAgent(
+    tool = Tool.function(
+        name = "visualAgent",
+        description = "可通过调用视觉模型识别图片。",
+        parameters = Parameters.buildJsonObject {
+            put("type", "object")
+            putJsonObject("properties") {
+                putJsonObject("image_url") {
+                    put("type", "string")
+                    put("description", "图片地址")
+                }
+                putJsonObject("prompt") {
+                    put("type", "string")
+                    put("description", "用于调用视觉模型的提示词")
+                }
+            }
+            putJsonArray("required") {
+                add("image_url")
+                add("prompt")
+            }
+        }
+    )
+) {
+    override val loadingMessage: String
+        get() = "图片识别中..."
+
+    override val isEnabled: Boolean
+        get() = LargeLanguageModels.visual != null
+
+    override suspend fun execute(args: JsonObject?): String {
+        requireNotNull(args)
+        val llm = LargeLanguageModels.visual ?: return "未配置llm,无法进行识别。"
+        val imageUrl = args.getValue("image_url").jsonPrimitive.content
+        val prompt = args.getValue("prompt").jsonPrimitive.content
+
+        val answerContent = StringBuilder()
+        llm.chatCompletions(ChatCompletionRequest(
+            model = ModelId(PluginConfig.visualModel),
+            messages = listOf(
+                ChatMessage.System("You are a helpful assistant."),
+                ChatMessage.User(
+                    content = listOf(
+                        ImagePart(imageUrl),
+                        TextPart(prompt)
+                    )
+                )
+            )
+        )).collect {
+            if (it.choices.isNotEmpty()) {
+                val delta = it.choices[0].delta ?: return@collect
+                if (!delta.content.isNullOrEmpty()) {
+                    answerContent.append(delta.content)
+                }
+            }
+        }
+        return answerContent.toString().ifEmpty { "识图异常,结果为空" }
+    }
+}
@@ -7,6 +7,7 @@ import io.ktor.client.statement.*
 import io.ktor.http.*
 import kotlinx.serialization.json.*
 import org.apache.commons.text.StringEscapeUtils
+import top.jie65535.mirai.JChatGPT
 import top.jie65535.mirai.PluginConfig
 
 class WebSearch : BaseAgent(
@@ -21,33 +22,6 @@ class WebSearch : BaseAgent(
                     put("type", "string")
                     put("description", "查询内容关键字")
                 }
-                putJsonObject("categories") {
-                    put("type", "array")
-                    putJsonObject("items") {
-                        put("type", "string")
-                        putJsonArray("enum") {
-                            add("general")
-                            add("images")
-                            add("videos")
-                            add("news")
-                            add("music")
-                            add("it")
-                            add("science")
-                            add("files")
-                            add("social_media")
-                        }
-                    }
-                    put("description", "可选择多项查询分类,通常情况下不传或用general即可。")
-                }
-                putJsonObject("time_range") {
-                    put("type", "string")
-                    putJsonArray("enum") {
-                        add("day")
-                        add("month")
-                        add("year")
-                    }
-                    put("description", "可选择获取最新消息,例如day表示只查询最近一天相关信息,以此类推。")
-                }
             }
             putJsonArray("required") {
                 add("q")
@@ -67,25 +41,17 @@ class WebSearch : BaseAgent(
     override suspend fun execute(args: JsonObject?): String {
         requireNotNull(args)
         val q = args.getValue("q").jsonPrimitive.content
-        val categories = args["categories"]?.jsonArray
-        val timeRange = args["time_range"]?.jsonPrimitive?.contentOrNull
-        val response = httpClient.get(
-            buildString {
-                append(PluginConfig.searXngUrl)
-                append("?q=")
-                append(q.encodeURLParameter())
-                append("&format=json")
-                if (categories != null) {
-                    append("&")
-                    append(categories.joinToString { it.jsonPrimitive.content })
-                }
-                if (timeRange != null) {
-                    append("&")
-                    append(timeRange)
-                }
-            }
-        )
+        val url = buildString {
+            append(PluginConfig.searXngUrl)
+            append("?q=")
+            append(q.encodeURLParameter())
+            append("&format=json")
+        }
+
+        val response = httpClient.get(url)
+        JChatGPT.logger.info("Request: $url")
         val body = response.bodyAsText()
+        JChatGPT.logger.info("Response: $body")
         val responseJsonElement = Json.parseToJsonElement(body)
         val filteredResponse = buildJsonObject {
             val root = responseJsonElement.jsonObject
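With the category and time-range parameters dropped, the request reduces to a plain GET with only q and format. For reference, a runnable sketch of the simplified URL construction with illustrative values (the address is the example from the config description):

import io.ktor.http.*

fun main() {
    // Illustrative values: searXngUrl as in the config description, q as a sample query.
    val searXngUrl = "http://127.0.0.1:8080/search"
    val q = "kotlin flow"
    val url = buildString {
        append(searXngUrl)
        append("?q=")
        append(q.encodeURLParameter())
        append("&format=json")
    }
    println(url) // http://127.0.0.1:8080/search?q=kotlin%20flow&format=json
}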