feat(boards): micropython下添加对openai chat接口的支持

This commit is contained in:
王立帮
2025-04-22 01:53:11 +08:00
parent 7bf049fe3e
commit a1bfd806fa
11 changed files with 737 additions and 160 deletions

View File

@@ -619,7 +619,7 @@ export const iot_onenetdisconnect = iot_onenet_disconnect;
export const iot_checkonenet = iot_onenet_check; export const iot_checkonenet = iot_onenet_check;
export const iot_publish = iot_onenet_publish; export const iot_publish = iot_onenet_publish;
export const IOT_CONNECT_OLLAMA = { export const iot_connect_ollama = {
init: function () { init: function () {
this.setColour(IOT_HUE); this.setColour(IOT_HUE);
this.appendDummyInput() this.appendDummyInput()
@@ -638,6 +638,30 @@ export const IOT_CONNECT_OLLAMA = {
} }
}; };
export const IOT_CONNECT_OLLAMA = iot_connect_ollama;
export const iot_connect_openai = {
    // Blockly block: configure a connection to an OpenAI-compatible chat
    // endpoint (server URL, API key, model name, max history length).
    init: function () {
        this.setColour(IOT_HUE);
        this.appendDummyInput().appendField(Blockly.Msg.MIXLY_CONNECT_OPENAI);
        // Helper for the three labelled, right-aligned value sockets.
        const appendRightAligned = (name, label) => {
            this.appendValueInput(name)
                .appendField(label)
                .setAlign(Blockly.inputs.Align.RIGHT);
        };
        appendRightAligned('SERVER', Blockly.Msg.MIXLY_EMQX_SERVER);
        appendRightAligned('KEY', Blockly.Msg.MIXLY_API_PRIVATE_KEY);
        appendRightAligned('NAME', Blockly.Msg.MODEL_NAME);
        this.appendValueInput('NUMBER')
            .appendField(Blockly.Msg.MIXLY_SET_MAXIMUM_HISTORICAL_SESSIONS_NUM);
        this.setInputsInline(false);
        this.setPreviousStatement(true);
        this.setNextStatement(true);
    }
};
export const use_ollama_llm_to_chat = { export const use_ollama_llm_to_chat = {
init: function () { init: function () {
this.setColour(IOT_HUE); this.setColour(IOT_HUE);

View File

@@ -235,18 +235,28 @@ export const iot_mqtt_data = function (_, generator) {
return [code, generator.ORDER_ATOMIC]; return [code, generator.ORDER_ATOMIC];
} }
export const IOT_CONNECT_OLLAMA = function(_,generator) { export const iot_connect_ollama = function (_, generator) {
generator.definitions_['import_Ollama'] = "from ollama import Ollama"; generator.definitions_['import_ollama'] = "from ollama import Ollama";
var ser = generator.valueToCode(this, 'SERVER', generator.ORDER_ATOMIC); var ser = generator.valueToCode(this, 'SERVER', generator.ORDER_ATOMIC);
var name = generator.valueToCode(this, 'NAME', generator.ORDER_ATOMIC); var name = generator.valueToCode(this, 'NAME', generator.ORDER_ATOMIC);
var num = generator.valueToCode(this, 'NUMBER', generator.ORDER_ATOMIC); var num = generator.valueToCode(this, 'NUMBER', generator.ORDER_ATOMIC);
var code = 'llm = Ollama(' + ser + ', ' + name + ', ' + num + ')\n'; var code = 'llm = Ollama(' + ser + ', ' + name + ', ' + num + ')\n';
return code; return code;
}
export const IOT_CONNECT_OLLAMA = iot_connect_ollama;
export const iot_connect_openai = function (_, generator) {
    // Generator: emit MicroPython that instantiates the global OpenAI chat
    // client from the block's SERVER/KEY/NAME/NUMBER value inputs.
    generator.definitions_['import_openai'] = "from openai import OpenAI";
    const args = ['SERVER', 'KEY', 'NAME', 'NUMBER'].map(
        // Arrow keeps the block as `this` for valueToCode.
        (input) => generator.valueToCode(this, input, generator.ORDER_ATOMIC)
    );
    return 'llm = OpenAI(' + args.join(', ') + ')\n';
};
export const use_ollama_llm_to_chat = function (_, generator) { export const use_ollama_llm_to_chat = function (_, generator) {
generator.definitions_['import_Ollama'] = "from ollama import Ollama";
var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC); var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC);
var method = generator.valueToCode(this, 'METHOD', generator.ORDER_ATOMIC); var method = generator.valueToCode(this, 'METHOD', generator.ORDER_ATOMIC);
var code = 'llm.chat(' + topic + ', ' + method + ')\n'; var code = 'llm.chat(' + topic + ', ' + method + ')\n';
@@ -254,48 +264,41 @@ export const use_ollama_llm_to_chat = function (_, generator) {
} }
export const use_ollama_llm_to_chat_return = function (_, generator) { export const use_ollama_llm_to_chat_return = function (_, generator) {
generator.definitions_['import_Ollama'] = "from ollama import Ollama";
var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC); var topic = generator.valueToCode(this, 'TOPIC', generator.ORDER_ATOMIC);
var code = 'llm.chat(' + topic + ')'; var code = 'llm.chat(' + topic + ')';
return [code, generator.ORDER_ATOMIC]; return [code, generator.ORDER_ATOMIC];
} }
// export const ollama_set_timeout = function (_,generator) { // export const ollama_set_timeout = function (_,generator) {
// generator.definitions_['import_Ollama'] = "from ollama import Ollama";
// var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC); // var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC);
// var code = 'llm.set_timeout(' + t + ')\n'; // var code = 'llm.set_timeout(' + t + ')\n';
// return code; // return code;
// } // }
// export const ollama_set_max_retries = function (_,generator) { // export const ollama_set_max_retries = function (_,generator) {
// generator.definitions_['import_Ollama'] = "from ollama import Ollama";
// var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC); // var t = generator.valueToCode(this, 'VAR', generator.ORDER_ATOMIC);
// var code = 'llm.set_max_retries(' + t + ')\n'; // var code = 'llm.set_max_retries(' + t + ')\n';
// return code; // return code;
// } // }
// export const ollama_set_custom_url = function (_,generator) { // export const ollama_set_custom_url = function (_,generator) {
// generator.definitions_['import_Ollama'] = "from ollama import Ollama";
// var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC); // var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC);
// var code = 'llm.set_custom_url(' + t + ')\n'; // var code = 'llm.set_custom_url(' + t + ')\n';
// return code; // return code;
// } // }
// export const ollama_select_model = function (_,generator) { // export const ollama_select_model = function (_,generator) {
// generator.definitions_['import_Ollama'] = "from ollama import Ollama";
// var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC); // var t = generator.valueToCode(this, 'TEXT', generator.ORDER_ATOMIC);
// var code = 'llm.select_model(' + t + ')\n'; // var code = 'llm.select_model(' + t + ')\n';
// return code; // return code;
// } // }
// export const ollama_clear_user_history = function (_,generator) { // export const ollama_clear_user_history = function (_,generator) {
// generator.definitions_['import_Ollama'] = "from ollama import Ollama";
// var code = 'llm.clear_user_history()\n'; // var code = 'llm.clear_user_history()\n';
// return code; // return code;
// } // }
export const ollama_empty_history = function (_,generator) { export const ollama_empty_history = function () {
generator.definitions_['import_Ollama'] = "from ollama import Ollama";
var code = 'llm.empty_history()\n'; var code = 'llm.empty_history()\n';
return code; return code;
} }

View File

@@ -60,7 +60,7 @@ class Ollama():
self._url, headers=self._heads, data=data) self._url, headers=self._heads, data=data)
if response.status_code == 200: if response.status_code == 200:
break break
time.slee(1) time.sleep(1)
output = "" output = ""
@@ -101,7 +101,7 @@ class Ollama():
self.add_history("assistant", content) self.add_history("assistant", content)
messages_len = len(self._messages) messages_len = len(self._messages)
history_num = 2 * self._max_history_num history_num = 2 * self._max_history_num
while history_num < len(self._messages): while history_num < messages_len:
del self._messages[0] del self._messages[0]
else: else:
self.clear_user_history() self.clear_user_history()

View File

@@ -0,0 +1,53 @@
import urequests
import time
import json
import ollama
class OpenAI(ollama.Ollama):
    """MicroPython client for OpenAI-compatible chat-completion endpoints.

    Reuses the Ollama client's history/retry machinery (inherited from
    ollama.Ollama); only the URL layout, bearer-token auth header and
    response parsing differ.
    """

    def __init__(self, url="", api_key="", model="", max_history_num=0, max_tokens=1024):
        # url: API base (e.g. "https://api.deepseek.com"); the chat path
        # "/chat/completions" is appended below.
        super().__init__(url, model, max_history_num)
        self._heads["Authorization"] = "Bearer {}".format(api_key)
        self._data["max_tokens"] = max_tokens
        self._chat_url = "{}/chat/completions".format(self._url)

    def _post(self, content_callback=None):
        # POST the accumulated chat payload; retry (count inherited from
        # Ollama) with a 1 s pause until the server answers 200.
        # Returns the assistant text; with content_callback set, the
        # response is streamed (SSE) and each delta is passed to the
        # callback as it arrives.
        response = None
        data = json.dumps(self._data).encode('utf-8')
        for _ in range(0, self._max_retries):
            response = urequests.post(
                self._chat_url, headers=self._heads, data=data)
            if response.status_code == 200:
                break
            time.sleep(1)
        output = ""
        if response.status_code != 200:
            # Error path: return the raw error body (same type as the
            # success path) and close the socket — the original leaked it.
            output = response.text
            response.close()
            if content_callback:
                content_callback(output)
            return output
        if not content_callback:
            # Non-streaming: the whole completion arrives in one JSON body.
            output = json.loads(response.text)[
                "choices"][0]["message"]["content"]
            response.close()
            return output
        try:
            # Streaming (SSE): one "data: {...}" JSON chunk per line,
            # terminated by "data: [DONE]".
            while True:
                line = response.raw.readline()
                if not line:
                    # EOF without [DONE] — stop instead of spinning forever.
                    break
                if line[:5] != b"data:":
                    continue
                # Strip the "data:" prefix and any surrounding whitespace;
                # robust even when the final line has no trailing newline.
                line = line[5:].decode('utf-8').strip()
                if line == "[DONE]":
                    break
                chunk = json.loads(line)
                # Role/finish chunks carry a delta without a "content" key;
                # skip them rather than raising KeyError.
                content = chunk["choices"][0]["delta"].get("content")
                if not content:
                    continue
                content_callback(content)
                output += content
        finally:
            response.close()
        return output

View File

@@ -7241,6 +7241,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">

View File

@@ -7070,6 +7070,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">
<block type="factory_import"></block> <block type="factory_import"></block>

View File

@@ -6961,7 +6961,7 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="IOT_CONNECT_OLLAMA"> <block type="iot_connect_ollama">
<value name="SERVER"> <value name="SERVER">
<shadow type="text"> <shadow type="text">
<field name="TEXT">192.168.1.1</field> <field name="TEXT">192.168.1.1</field>
@@ -6978,6 +6978,28 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat"> <block type="use_ollama_llm_to_chat">
<value name="TOPIC"> <value name="TOPIC">
<shadow type="text"> <shadow type="text">

View File

@@ -1771,7 +1771,8 @@
<block type="image_arithmetic"> <block type="image_arithmetic">
<value name="A"> <value name="A">
<shadow type="pins_builtinimg"></shadow> <shadow type="pins_builtinimg"></shadow>
</value> <value name="B"> </value>
<value name="B">
<shadow type="pins_builtinimg"></shadow> <shadow type="pins_builtinimg"></shadow>
</value> </value>
</block> </block>
@@ -6874,6 +6875,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">
<block type="factory_import"></block> <block type="factory_import"></block>

View File

@@ -6605,6 +6605,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">
<block type="factory_import"></block> <block type="factory_import"></block>

View File

@@ -7290,6 +7290,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">
<block type="factory_import"></block> <block type="factory_import"></block>

View File

@@ -5675,6 +5675,85 @@
</shadow> </shadow>
</value> </value>
</block> </block>
<block type="iot_connect_ollama">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">192.168.1.1</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">qwen2.5-coder:0.5b</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="iot_connect_openai">
<value name="SERVER">
<shadow type="text">
<field name="TEXT">https://api.deepseek.com</field>
</shadow>
</value>
<value name="KEY">
<shadow type="text">
<field name="TEXT">API Key</field>
</shadow>
</value>
<value name="NAME">
<shadow type="text">
<field name="TEXT">deepseek-chat</field>
</shadow>
</value>
<value name="NUMBER">
<shadow type="math_number">
<field name="NUM">5</field>
</shadow>
</value>
</block>
<block type="use_ollama_llm_to_chat">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
<value name="METHOD">
<shadow type="factory_block_return">
<field name="VALUE">content_callback</field>
</shadow>
</value>
</block>
<block type="procedures_defnoreturn">
<mutation>
<arg name="content"></arg>
</mutation>
<field name="NAME">content_callback</field>
<statement name="STACK">
<block type="system_print_end">
<value name="VAR">
<block type="variables_get">
<field name="VAR">content</field>
</block>
</value>
<value name="END">
<shadow type="text">
<field name="TEXT"></field>
</shadow>
</value>
</block>
</statement>
</block>
<block type="use_ollama_llm_to_chat_return">
<value name="TOPIC">
<shadow type="text">
<field name="TEXT">请介绍一下米思齐?</field>
</shadow>
</value>
</block>
<block type="ollama_empty_history"></block>
</category> </category>
<category id="catFactory" name="Factory" colour="#777777"> <category id="catFactory" name="Factory" colour="#777777">
<block type="factory_import"></block> <block type="factory_import"></block>