Spaces:
Running
Running
owenkaplinsky
committed on
Commit
·
59bfe9e
1
Parent(s):
5f90a0c
Add variable viewing; var block creation
Browse files- project/blocks.txt +11 -3
- project/chat.py +19 -1
- project/src/index.js +22 -9
- project/test.py +1 -1
project/blocks.txt
CHANGED
|
@@ -2,9 +2,9 @@
|
|
| 2 |
llm_call(inputs(MODEL: "gpt-3.5-turbo-0125/gpt-4o-2024-08-06/gpt-5-mini-2025-08-07/gpt-5-2025-08-07/gpt-4o-search-preview-2025-03-11", PROMPT: value))
|
| 3 |
|
| 4 |
# JSON and API
|
| 5 |
-
in_json(inputs(NAME: value, JSON: value)) //
|
| 6 |
make_json(inputs(KEYN: value, FIELDN: value)) // N starts at 0; you can make as many N as you want. Each key goes with one field
|
| 7 |
-
call_api(inputs(METHOD: "GET/POST/PUT/DELETE", URL: value, HEADERS: value)) //
|
| 8 |
|
| 9 |
# Logic
|
| 10 |
controls_if(inputs(IFN: value)) // N starts at 0; you can make as many N as you want
|
|
@@ -17,6 +17,8 @@ logic_operation(inputs(OP: "AND/OR", A: value, B: value))
|
|
| 17 |
# Loops
|
| 18 |
controls_repeat_ext(inputs(TIMES: value))
|
| 19 |
controls_whileUntil(inputs(MODE: "WHILE/UNTIL", BOOL: value))
|
|
|
|
|
|
|
| 20 |
controls_flow_statements(inputs(FLOW: "CONTINUE/BREAK")) // Must go **inside** of a loop
|
| 21 |
|
| 22 |
# Math
|
|
@@ -43,6 +45,12 @@ text_trim(inputs(MODE: "BOTH/LEFT/RIGHT", TEXT: value)) // Trim spaces from text
|
|
| 43 |
# Lists
|
| 44 |
lists_length(inputs(VALUE: value))
|
| 45 |
lists_isEmpty(inputs(VALUE: value)) // Boolean output
|
|
|
|
| 46 |
lists_reverse(inputs(LIST: value))
|
| 47 |
lists_create_with(inputs(ADDN: value)) // N starts at 0; you can make as many N as you want
|
| 48 |
-
lists_sort(inputs(TYPE: "NUMERIC/TEXT/IGNORE_CASE", DIRECTION: "1/-1")) // For direction, 1 is ascending, -1 is descending
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
llm_call(inputs(MODEL: "gpt-3.5-turbo-0125/gpt-4o-2024-08-06/gpt-5-mini-2025-08-07/gpt-5-2025-08-07/gpt-4o-search-preview-2025-03-11", PROMPT: value))
|
| 3 |
|
| 4 |
# JSON and API
|
| 5 |
+
in_json(inputs(NAME: value, JSON: value)) // NAME is the value you want to extract from the JSON
|
| 6 |
make_json(inputs(KEYN: value, FIELDN: value)) // N starts at 0; you can make as many N as you want. Each key goes with one field
|
| 7 |
+
call_api(inputs(METHOD: "GET/POST/PUT/DELETE", URL: value, HEADERS: value)) // HEADERS is optional
|
| 8 |
|
| 9 |
# Logic
|
| 10 |
controls_if(inputs(IFN: value)) // N starts at 0; you can make as many N as you want
|
|
|
|
| 17 |
# Loops
|
| 18 |
controls_repeat_ext(inputs(TIMES: value))
|
| 19 |
controls_whileUntil(inputs(MODE: "WHILE/UNTIL", BOOL: value))
|
| 20 |
+
controls_for(inputs(VAR: value, FROM: value, TO: value, BY: value)) // VAR is a variable ID. BY is the increment amount
|
| 21 |
+
controls_forEach(inputs(VAR: value), LIST: value) // VAR is a variable ID
|
| 22 |
controls_flow_statements(inputs(FLOW: "CONTINUE/BREAK")) // Must go **inside** of a loop
|
| 23 |
|
| 24 |
# Math
|
|
|
|
| 45 |
# Lists
|
| 46 |
lists_length(inputs(VALUE: value))
|
| 47 |
lists_isEmpty(inputs(VALUE: value)) // Boolean output
|
| 48 |
+
lists_indexOf(inputs(END: "FIRST/LAST", VALUE: value, FIND: value)) // VALUE is a variable ID
|
| 49 |
lists_reverse(inputs(LIST: value))
|
| 50 |
lists_create_with(inputs(ADDN: value)) // N starts at 0; you can make as many N as you want
|
| 51 |
+
lists_sort(inputs(TYPE: "NUMERIC/TEXT/IGNORE_CASE", DIRECTION: "1/-1")) // For direction, 1 is ascending, -1 is descending
|
| 52 |
+
|
| 53 |
+
# Variables
|
| 54 |
+
variables_get(inputs(VAR: value)) // VAR is a variable ID
|
| 55 |
+
variables_set(inputs(VAR: value, VALUE: value)) // VAR is a variable ID
|
| 56 |
+
math_change(inputs(VAR: value, DELTA: value)) // VAR is a variable ID
|
project/chat.py
CHANGED
|
@@ -23,6 +23,9 @@ stored_api_key = ""
|
|
| 23 |
# Global variable to store the latest chat context
|
| 24 |
latest_blockly_chat_code = ""
|
| 25 |
|
|
|
|
|
|
|
|
|
|
| 26 |
# Queue for deletion requests and results storage
|
| 27 |
deletion_queue = queue.Queue()
|
| 28 |
deletion_results = {}
|
|
@@ -57,10 +60,12 @@ app.add_middleware(
|
|
| 57 |
|
| 58 |
@app.post("/update_chat")
|
| 59 |
async def update_chat(request: Request):
|
| 60 |
-
global latest_blockly_chat_code
|
| 61 |
data = await request.json()
|
| 62 |
latest_blockly_chat_code = data.get("code", "")
|
|
|
|
| 63 |
print("\n[FASTAPI] Updated Blockly chat code:\n", latest_blockly_chat_code)
|
|
|
|
| 64 |
return {"code": latest_blockly_chat_code}
|
| 65 |
|
| 66 |
@app.post("/set_api_key_chat")
|
|
@@ -647,6 +652,12 @@ if you want it to be blank.
|
|
| 647 |
When creating blocks, you are unable to put an outputting block inside of another block
|
| 648 |
which already exists. If you are trying to nest input blocks, you must create them all
|
| 649 |
in one call.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 650 |
"""
|
| 651 |
|
| 652 |
tools = [
|
|
@@ -741,6 +752,8 @@ in one call.
|
|
| 741 |
# Get the chat context from the global variable
|
| 742 |
global latest_blockly_chat_code
|
| 743 |
context = latest_blockly_chat_code
|
|
|
|
|
|
|
| 744 |
|
| 745 |
# Convert history to OpenAI format
|
| 746 |
full_history = []
|
|
@@ -758,6 +771,11 @@ in one call.
|
|
| 758 |
else:
|
| 759 |
full_system_prompt += "\n\nNote: No Blockly workspace context is currently available."
|
| 760 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 761 |
# Allow up to 10 consecutive messages from the agent
|
| 762 |
accumulated_response = ""
|
| 763 |
max_iterations = 10
|
|
|
|
| 23 |
# Global variable to store the latest chat context
|
| 24 |
latest_blockly_chat_code = ""
|
| 25 |
|
| 26 |
+
# Global variable to store the workspace's variables
|
| 27 |
+
latest_blockly_vars = ""
|
| 28 |
+
|
| 29 |
# Queue for deletion requests and results storage
|
| 30 |
deletion_queue = queue.Queue()
|
| 31 |
deletion_results = {}
|
|
|
|
| 60 |
|
| 61 |
@app.post("/update_chat")
|
| 62 |
async def update_chat(request: Request):
|
| 63 |
+
global latest_blockly_chat_code, latest_blockly_vars
|
| 64 |
data = await request.json()
|
| 65 |
latest_blockly_chat_code = data.get("code", "")
|
| 66 |
+
latest_blockly_vars = data.get("varString", "")
|
| 67 |
print("\n[FASTAPI] Updated Blockly chat code:\n", latest_blockly_chat_code)
|
| 68 |
+
print("\n[FASTAPI] Updated Blockly variables:\n", latest_blockly_vars)
|
| 69 |
return {"code": latest_blockly_chat_code}
|
| 70 |
|
| 71 |
@app.post("/set_api_key_chat")
|
|
|
|
| 652 |
When creating blocks, you are unable to put an outputting block inside of another block
|
| 653 |
which already exists. If you are trying to nest input blocks, you must create them all
|
| 654 |
in one call.
|
| 655 |
+
|
| 656 |
+
### Variables
|
| 657 |
+
|
| 658 |
+
You will be given the current variables that are in the workspace. Like the blocks, you will see:
|
| 659 |
+
|
| 660 |
+
`varId | varName`
|
| 661 |
"""
|
| 662 |
|
| 663 |
tools = [
|
|
|
|
| 752 |
# Get the chat context from the global variable
|
| 753 |
global latest_blockly_chat_code
|
| 754 |
context = latest_blockly_chat_code
|
| 755 |
+
global latest_blockly_vars
|
| 756 |
+
vars = latest_blockly_vars
|
| 757 |
|
| 758 |
# Convert history to OpenAI format
|
| 759 |
full_history = []
|
|
|
|
| 771 |
else:
|
| 772 |
full_system_prompt += "\n\nNote: No Blockly workspace context is currently available."
|
| 773 |
|
| 774 |
+
if vars != "":
|
| 775 |
+
full_system_prompt += f"\n\nCurrent Blockly variables:\n{vars}"
|
| 776 |
+
else:
|
| 777 |
+
full_system_prompt += "\n\nNote: No Blockly variables are currently available."
|
| 778 |
+
|
| 779 |
# Allow up to 10 consecutive messages from the agent
|
| 780 |
accumulated_response = ""
|
| 781 |
max_iterations = 10
|
project/src/index.js
CHANGED
|
@@ -876,6 +876,9 @@ const updateCode = () => {
|
|
| 876 |
}
|
| 877 |
// Ignore any other top-level blocks (stray blocks)
|
| 878 |
}
|
|
|
|
|
|
|
|
|
|
| 879 |
|
| 880 |
const codeEl = document.querySelector('#generatedCode code');
|
| 881 |
|
|
@@ -943,13 +946,20 @@ let chatBackendAvailable = false;
|
|
| 943 |
let chatUpdateQueue = [];
|
| 944 |
let chatRetryTimeout = null;
|
| 945 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 946 |
// Function to check if chat backend is available
|
| 947 |
const checkChatBackend = async () => {
|
| 948 |
try {
|
| 949 |
const response = await fetch("http://127.0.0.1:7861/update_chat", {
|
| 950 |
method: "POST",
|
| 951 |
headers: { "Content-Type": "application/json" },
|
| 952 |
-
body: JSON.stringify({
|
|
|
|
|
|
|
|
|
|
| 953 |
});
|
| 954 |
if (response.ok) {
|
| 955 |
chatBackendAvailable = true;
|
|
@@ -974,12 +984,15 @@ const processChatUpdateQueue = () => {
|
|
| 974 |
};
|
| 975 |
|
| 976 |
// Send chat update with retry logic
|
| 977 |
-
const sendChatUpdate = async (
|
| 978 |
try {
|
| 979 |
const response = await fetch("http://127.0.0.1:7861/update_chat", {
|
| 980 |
method: "POST",
|
| 981 |
headers: { "Content-Type": "application/json" },
|
| 982 |
-
body: JSON.stringify({
|
|
|
|
|
|
|
|
|
|
| 983 |
});
|
| 984 |
|
| 985 |
if (response.ok) {
|
|
@@ -999,9 +1012,9 @@ const sendChatUpdate = async (code, retryCount = 0) => {
|
|
| 999 |
if (!chatBackendAvailable) {
|
| 1000 |
checkChatBackend().then(available => {
|
| 1001 |
if (available) {
|
| 1002 |
-
sendChatUpdate(
|
| 1003 |
} else if (retryCount < 4) {
|
| 1004 |
-
sendChatUpdate(
|
| 1005 |
}
|
| 1006 |
});
|
| 1007 |
}
|
|
@@ -1012,22 +1025,22 @@ const sendChatUpdate = async (code, retryCount = 0) => {
|
|
| 1012 |
|
| 1013 |
// Update function for the Chat generator (AI Chat tab)
|
| 1014 |
const updateChatCode = () => {
|
| 1015 |
-
|
| 1016 |
const codeEl = document.querySelector('#aichatCode code');
|
| 1017 |
|
| 1018 |
// You can add any chat-specific preprocessing here
|
| 1019 |
// For example, adding headers or formatting
|
| 1020 |
|
| 1021 |
if (codeEl) {
|
| 1022 |
-
codeEl.textContent =
|
| 1023 |
}
|
| 1024 |
|
| 1025 |
// If backend is available, send immediately
|
| 1026 |
if (chatBackendAvailable) {
|
| 1027 |
-
sendChatUpdate(
|
| 1028 |
} else {
|
| 1029 |
// Queue the update and try to establish connection
|
| 1030 |
-
chatUpdateQueue.push(
|
| 1031 |
|
| 1032 |
// Clear any existing retry timeout
|
| 1033 |
if (chatRetryTimeout) {
|
|
|
|
| 876 |
}
|
| 877 |
// Ignore any other top-level blocks (stray blocks)
|
| 878 |
}
|
| 879 |
+
|
| 880 |
+
const vars = ws.getVariableMap().getAllVariables();
|
| 881 |
+
globalVarString = vars.map(v => `${v.id} | ${v.name}`).join("\n");
|
| 882 |
|
| 883 |
const codeEl = document.querySelector('#generatedCode code');
|
| 884 |
|
|
|
|
| 946 |
let chatUpdateQueue = [];
|
| 947 |
let chatRetryTimeout = null;
|
| 948 |
|
| 949 |
+
// Global variables for chat code and variables
|
| 950 |
+
let globalChatCode = '';
|
| 951 |
+
let globalVarString = '';
|
| 952 |
+
|
| 953 |
// Function to check if chat backend is available
|
| 954 |
const checkChatBackend = async () => {
|
| 955 |
try {
|
| 956 |
const response = await fetch("http://127.0.0.1:7861/update_chat", {
|
| 957 |
method: "POST",
|
| 958 |
headers: { "Content-Type": "application/json" },
|
| 959 |
+
body: JSON.stringify({
|
| 960 |
+
code: globalChatCode,
|
| 961 |
+
varString: globalVarString
|
| 962 |
+
}),
|
| 963 |
});
|
| 964 |
if (response.ok) {
|
| 965 |
chatBackendAvailable = true;
|
|
|
|
| 984 |
};
|
| 985 |
|
| 986 |
// Send chat update with retry logic
|
| 987 |
+
const sendChatUpdate = async (chatCode, retryCount = 0) => {
|
| 988 |
try {
|
| 989 |
const response = await fetch("http://127.0.0.1:7861/update_chat", {
|
| 990 |
method: "POST",
|
| 991 |
headers: { "Content-Type": "application/json" },
|
| 992 |
+
body: JSON.stringify({
|
| 993 |
+
code: chatCode,
|
| 994 |
+
varString: globalVarString
|
| 995 |
+
}),
|
| 996 |
});
|
| 997 |
|
| 998 |
if (response.ok) {
|
|
|
|
| 1012 |
if (!chatBackendAvailable) {
|
| 1013 |
checkChatBackend().then(available => {
|
| 1014 |
if (available) {
|
| 1015 |
+
sendChatUpdate(chatCode, retryCount + 1);
|
| 1016 |
} else if (retryCount < 4) {
|
| 1017 |
+
sendChatUpdate(chatCode, retryCount + 1);
|
| 1018 |
}
|
| 1019 |
});
|
| 1020 |
}
|
|
|
|
| 1025 |
|
| 1026 |
// Update function for the Chat generator (AI Chat tab)
|
| 1027 |
const updateChatCode = () => {
|
| 1028 |
+
globalChatCode = chatGenerator.workspaceToCode(ws);
|
| 1029 |
const codeEl = document.querySelector('#aichatCode code');
|
| 1030 |
|
| 1031 |
// You can add any chat-specific preprocessing here
|
| 1032 |
// For example, adding headers or formatting
|
| 1033 |
|
| 1034 |
if (codeEl) {
|
| 1035 |
+
codeEl.textContent = globalChatCode;
|
| 1036 |
}
|
| 1037 |
|
| 1038 |
// If backend is available, send immediately
|
| 1039 |
if (chatBackendAvailable) {
|
| 1040 |
+
sendChatUpdate(globalChatCode);
|
| 1041 |
} else {
|
| 1042 |
// Queue the update and try to establish connection
|
| 1043 |
+
chatUpdateQueue.push(globalChatCode);
|
| 1044 |
|
| 1045 |
// Clear any existing retry timeout
|
| 1046 |
if (chatRetryTimeout) {
|
project/test.py
CHANGED
|
@@ -25,7 +25,7 @@ async def update_code(request: Request):
|
|
| 25 |
global latest_blockly_code
|
| 26 |
data = await request.json()
|
| 27 |
latest_blockly_code = data.get("code", "")
|
| 28 |
-
print("\n[FASTAPI] Updated Blockly code
|
| 29 |
return {"ok": True}
|
| 30 |
|
| 31 |
# Sends the latest code to chat.py so that the agent will be able to use the MCP
|
|
|
|
| 25 |
global latest_blockly_code
|
| 26 |
data = await request.json()
|
| 27 |
latest_blockly_code = data.get("code", "")
|
| 28 |
+
print("\n[FASTAPI] Updated Blockly code")
|
| 29 |
return {"ok": True}
|
| 30 |
|
| 31 |
# Sends the latest code to chat.py so that the agent will be able to use the MCP
|