feat(boards): add OpenAI chat API support for MicroPython

王立帮
2025-04-22 01:53:11 +08:00
parent 7bf049fe3e
commit a1bfd806fa
11 changed files with 737 additions and 160 deletions

View File

@@ -619,7 +619,7 @@ export const iot_onenetdisconnect = iot_onenet_disconnect;
 export const iot_checkonenet = iot_onenet_check;
 export const iot_publish = iot_onenet_publish;
-export const IOT_CONNECT_OLLAMA = {
+export const iot_connect_ollama = {
     init: function () {
         this.setColour(IOT_HUE);
         this.appendDummyInput()
@@ -638,6 +638,30 @@ export const IOT_CONNECT_OLLAMA = {
     }
 };
+export const IOT_CONNECT_OLLAMA = iot_connect_ollama;
+export const iot_connect_openai = {
+    init: function () {
+        this.setColour(IOT_HUE);
+        this.appendDummyInput()
+            .appendField(Blockly.Msg.MIXLY_CONNECT_OPENAI);
+        this.appendValueInput('SERVER')
+            .appendField(Blockly.Msg.MIXLY_EMQX_SERVER)
+            .setAlign(Blockly.inputs.Align.RIGHT);
+        this.appendValueInput('KEY')
+            .appendField(Blockly.Msg.MIXLY_API_PRIVATE_KEY)
+            .setAlign(Blockly.inputs.Align.RIGHT);
+        this.appendValueInput('NAME')
+            .appendField(Blockly.Msg.MODEL_NAME)
+            .setAlign(Blockly.inputs.Align.RIGHT);
+        this.appendValueInput('NUMBER')
+            .appendField(Blockly.Msg.MIXLY_SET_MAXIMUM_HISTORICAL_SESSIONS_NUM);
+        this.setInputsInline(false);
+        this.setPreviousStatement(true);
+        this.setNextStatement(true);
+    }
+};
 export const use_ollama_llm_to_chat = {
     init: function () {
         this.setColour(IOT_HUE);
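
For orientation: once paired with its generator (next file), the new iot_connect_openai block is meant to emit a single constructor call. A minimal sketch of that output, where all four slot values are made-up placeholders, not part of the commit:

# Hypothetical MicroPython emitted from the block's SERVER/KEY/NAME/NUMBER slots
llm = OpenAI('https://api.openai.com/v1', 'YOUR_API_KEY', 'gpt-4o-mini', 5)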

View File

@@ -158,7 +158,7 @@ export const IOT_EMQX_INIT_AND_CONNECT_BY_SHARE_CODE = function (_, generator) {
     // var mac_address = 'str(binascii.hexlify(wifi.radio.mac_address))[2:14]'
     // var socket_pool = 'socketpool.SocketPool(wifi.radio)'
     // var ssl_context = 'ssl.create_default_context()'
-    var code = 'sk = analyse_sharekey(\'http://'+server.substring(1, server.length-1)+'/mixio-php/sharekey.php?sk=' + share_code + '\')\n'+
+    var code = 'sk = analyse_sharekey(\'http://' + server.substring(1, server.length - 1) + '/mixio-php/sharekey.php?sk=' + share_code + '\')\n' +
         'MQTT_USR_PRJ = sk[0]+\'/\'+sk[1]+\'/\'\n' +
         'mqtt_client = mixiot.init_MQTT_client(' + server + ', sk[0], sk[2]' + ', MQTT_USR_PRJ)\n';
     return code;
@@ -235,18 +235,28 @@ export const iot_mqtt_data = function (_, generator) {
     return [code, generator.ORDER_ATOMIC];
 }
-export const IOT_CONNECT_OLLAMA = function(_,generator) {
-    generator.definitions_['import_Ollama'] = "from ollama import Ollama";
+export const iot_connect_ollama = function (_, generator) {
+    generator.definitions_['import_ollama'] = "from ollama import Ollama";
     var ser = generator.valueToCode(this, 'SERVER', generator.ORDER_ATOMIC);
     var name = generator.valueToCode(this, 'NAME', generator.ORDER_ATOMIC);
     var num = generator.valueToCode(this, 'NUMBER', generator.ORDER_ATOMIC);
-    var code = 'llm = Ollama(' + ser + ', ' + name + ', '+ num +')\n';
+    var code = 'llm = Ollama(' + ser + ', ' + name + ', ' + num + ')\n';
     return code;
 }
+export const IOT_CONNECT_OLLAMA = iot_connect_ollama;
+export const iot_connect_openai = function (_, generator) {
+    generator.definitions_['import_openai'] = "from openai import OpenAI";
+    var ser = generator.valueToCode(this, 'SERVER', generator.ORDER_ATOMIC);
+    var key = generator.valueToCode(this, 'KEY', generator.ORDER_ATOMIC);
+    var name = generator.valueToCode(this, 'NAME', generator.ORDER_ATOMIC);
+    var num = generator.valueToCode(this, 'NUMBER', generator.ORDER_ATOMIC);
+    var code = `llm = OpenAI(${ser}, ${key}, ${name}, ${num})\n`;
+    return code;
+}
 export const use_ollama_llm_to_chat = function (_, generator) {
     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
     var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC);
     var method = generator.valueToCode(this, 'METHOD', generator.ORDER_ATOMIC);
     var code = 'llm.chat(' + topic + ', ' + method + ')\n';
@@ -254,48 +264,41 @@ export const use_ollama_llm_to_chat = function (_, generator) {
 }
 export const use_ollama_llm_to_chat_return = function (_, generator) {
     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
     var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC);
     var code = 'llm.chat(' + topic + ')';
-    return [code,generator.ORDER_ATOMIC];
+    return [code, generator.ORDER_ATOMIC];
 }
 // export const ollama_set_timeout = function (_,generator) {
 //     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
 //     var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC);
 //     var code = 'llm.set_timeout(' + t + ')\n';
 //     return code;
 // }
 // export const ollama_set_max_retries = function (_,generator) {
 //     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
 //     var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC);
 //     var code = 'llm.set_max_retries(' + t + ')\n';
 //     return code;
 // }
 // export const ollama_set_custom_url = function (_,generator) {
 //     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
 //     var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC);
 //     var code = 'llm.set_custom_url(' + t + ')\n';
 //     return code;
 // }
 // export const ollama_select_model = function (_,generator) {
 //     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
 //     var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC);
 //     var code = 'llm.select_model(' + t + ')\n';
 //     return code;
 // }
 // export const ollama_clear_user_history = function (_,generator) {
 //     generator.definitions_['import_Ollama'] = "from ollama import Ollama";
 //     var code = 'llm.clear_user_history()\n';
 //     return code;
 // }
-export const ollama_empty_history = function (_,generator) {
-    generator.definitions_['import_Ollama'] = "from ollama import Ollama";
+export const ollama_empty_history = function () {
     var code = 'llm.empty_history()\n';
     return code;
 }
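
Putting these generators together, a hedged sketch of the MicroPython program the OpenAI blocks would emit. The prompt strings and the callback name are invented for illustration, and chat() is assumed to be inherited from the Ollama class with the (topic, callback) signature the chat blocks target:

from openai import OpenAI

def on_token(text):
    # fills the METHOD slot of the chat block; called once per streamed fragment
    print(text, end='')

llm = OpenAI('https://api.openai.com/v1', 'YOUR_API_KEY', 'gpt-4o-mini', 5)
llm.chat('Hello there', on_token)      # statement form: stream via callback
reply = llm.chat('One-line summary?')  # value form: returns the whole reply
llm.empty_history()                    # matches the ollama_empty_history block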

View File

@@ -60,7 +60,7 @@ class Ollama():
                 self._url, headers=self._heads, data=data)
             if response.status_code == 200:
                 break
-            time.slee(1)
+            time.sleep(1)
         output = ""
@@ -101,7 +101,7 @@ class Ollama():
             self.add_history("assistant", content)
             messages_len = len(self._messages)
             history_num = 2 * self._max_history_num
-            while history_num < len(self._messages):
+            while history_num < messages_len:
                 del self._messages[0]
         else:
             self.clear_user_history()
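
For reference, the trimming loop being touched here caps stored context at 2 * max_history_num entries, one user and one assistant entry per exchange, dropping the oldest first. A standalone sketch of that invariant, with the message shape assumed from the add_history calls:

messages = []                          # stand-in for self._messages
max_history_num = 2                    # keep at most two exchanges
for i in range(5):                     # simulate five completed exchanges
    messages.append({'role': 'user', 'content': 'q%d' % i})
    messages.append({'role': 'assistant', 'content': 'a%d' % i})
    history_num = 2 * max_history_num
    while history_num < len(messages):
        del messages[0]                # oldest entry goes first
print(len(messages))                   # 4: only the newest two exchanges remain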

View File

@@ -0,0 +1,53 @@
+import urequests
+import time
+import json
+import ollama
+class OpenAI(ollama.Ollama):
+    def __init__(self, url="", api_key="", model="", max_history_num=0, max_tokens=1024):
+        super().__init__(url, model, max_history_num)
+        self._heads["Authorization"] = "Bearer {}".format(api_key)
+        self._data["max_tokens"] = max_tokens
+        self._chat_url = "{}/chat/completions".format(self._url)
+    def _post(self, content_callback=None):
+        response = None
+        data = json.dumps(self._data).encode('utf-8')
+        for i in range(0, self._max_retries):
+            response = urequests.post(
+                self._chat_url, headers=self._heads, data=data)
+            if response.status_code == 200:
+                break
+            time.sleep(1)
+        output = ""
+        if response.status_code != 200:
+            output = response.text
+            if content_callback:
+                content_callback(output)
+            return output
+        if not content_callback:
+            output = json.loads(response.text)[
+                "choices"][0]["message"]["content"]
+            response.close()
+            return output
+        try:
+            while True:
+                line = response.raw.readline()
+                if line[:5] != b"data:":
+                    continue
+                if line[-7:-1] == b"[DONE]":
+                    break
+                line = line[6:-1]
+                line = line.decode('utf-8').strip()
+                data = json.loads(line)
+                content = data["choices"][0]["delta"]["content"]
+                content_callback(content)
+                output += content
+        finally:
+            response.close()
+        return output
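
A hedged usage sketch of the new class on a board. The endpoint, key, and model are placeholders; Wi-Fi is assumed to be up, and chat() is assumed inherited from ollama.Ollama, routing into _post's streaming path when a callback is supplied and its blocking path otherwise:

from openai import OpenAI

llm = OpenAI(url='https://api.openai.com/v1',  # base URL; /chat/completions is appended
             api_key='YOUR_API_KEY',           # placeholder credential
             model='gpt-4o-mini',              # placeholder model name
             max_history_num=3,                # context kept, in exchanges
             max_tokens=256)                   # stored into self._data

def show(fragment):
    print(fragment, end='')                    # render tokens as they arrive

llm.chat('What is MicroPython?', show)         # streamed: _post walks the "data:" lines
answer = llm.chat('Answer in one sentence.')   # blocking: choices[0].message.content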