first commit
commit
ba7994e91f
@ -0,0 +1,16 @@
|
||||
|
||||
|
||||
# OpenWrt/LuCI package Makefile for luci-app-autogpt.
include $(TOPDIR)/rules.mk

LUCI_TITLE:=LuCI support for autogpt
# Architecture-independent (Lua/shell only), so a single package serves all targets.
LUCI_PKGARCH:=all
# lsblk is used by luasrc/model/autogpt.lua to enumerate mountpoints;
# docker runs the container; luci-lib-taskd provides taskd.docker_map.
LUCI_DEPENDS:=+lsblk +docker +luci-lib-taskd

# Preserve the app's UCI settings across sysupgrade.
define Package/luci-app-autogpt/conffiles
/etc/config/autogpt
endef

include $(TOPDIR)/feeds/luci/luci.mk

# call BuildPackage - OpenWrt buildroot signature
|
@ -0,0 +1,15 @@
|
||||
|
||||
-- Controller for luci-app-autogpt: registers the shared "Apps" menu node,
-- the CBI settings page and the status endpoint.
module("luci.controller.autogpt", package.seeall)

function index()
	-- The "Apps" parent node is shared with sibling apps; dependent=false
	-- keeps it visible even when this app's entry is hidden.
	entry({"admin", "apps"}, firstchild(), _("Apps") , 45).dependent = false

	-- Without a UCI config file there is nothing to configure; bail out.
	if not nixio.fs.access("/etc/config/autogpt") then
		return
	end

	local node = entry({"admin", "apps", "autogpt"}, cbi("autogpt"), _("autogpt"))
	node.dependent = true
	node.acl_depends = { "luci-app-autogpt" }
	node.order = 10

	-- Leaf endpoint polled by the UI for container status.
	entry({"admin","apps","autogpt","status"},call("act_status")).leaf=true
end
|
@ -0,0 +1,55 @@
|
||||
local util = require "luci.util"
|
||||
local jsonc = require "luci.jsonc"
|
||||
|
||||
local autogpt = {}
|
||||
|
||||
-- Enumerate mountpoints of mounted block devices with a large filesystem.
-- Shells out to `lsblk --json`; returns an empty list when lsblk cannot
-- be launched.
autogpt.blocks = function()
	local mounts = {}
	local handle = io.popen("lsblk -s -f -b -o NAME,FSSIZE,MOUNTPOINT --json", "r")
	if handle then
		local output = handle:read("*all")
		handle:close()
		local parsed = jsonc.parse(output)
		for _, dev in pairs(parsed["blockdevices"]) do
			local size = dev["fssize"]
			-- Keep only mounted filesystems whose byte-size string has more
			-- than 10 digits. NOTE(review): the original comment said
			-- "> 1G", but len > 10 actually means >= 1e10 bytes — confirm
			-- the intended threshold.
			if size ~= nil and string.len(size) > 10 and dev["mountpoint"] then
				mounts[#mounts + 1] = dev["mountpoint"]
			end
		end
	end
	return mounts
end
|
||||
|
||||
-- Resolve the user's standard directories from the "quickstart" UCI
-- package, falling back to conventional paths under /root. The lookups
-- are ordered because later defaults build on earlier results.
autogpt.home = function()
	local uci = require "luci.model.uci".cursor()

	local function pick(opt, fallback)
		return uci:get_first("quickstart", "main", opt, fallback)
	end

	local dirs = {}
	dirs["main_dir"]  = pick("main_dir", "/root")
	dirs["Configs"]   = pick("conf_dir", dirs["main_dir"] .. "/Configs")
	dirs["Public"]    = pick("pub_dir",  dirs["main_dir"] .. "/Public")
	dirs["Downloads"] = pick("dl_dir",   dirs["Public"] .. "/Downloads")
	dirs["Caches"]    = pick("tmp_dir",  dirs["main_dir"] .. "/Caches")
	return dirs
end
|
||||
|
||||
-- Build candidate autogpt data directories.
-- blocks:    list of external mountpoints (from autogpt.blocks()).
-- home_dirs: directory map (from autogpt.home()).
-- path_name: key into home_dirs (e.g. "Configs").
-- Returns the candidate list plus the suggested default path.
autogpt.find_paths = function(blocks, home_dirs, path_name)
	local candidates = {}
	local preferred = home_dirs[path_name] .. "/autogpt"

	if #blocks == 0 then
		-- No external storage: offer only the home-based path.
		candidates[#candidates + 1] = preferred
	else
		for _, mount in pairs(blocks) do
			candidates[#candidates + 1] = mount .. "/" .. path_name .. "/autogpt"
		end
		-- When the configured home dir is just the /root fallback, prefer
		-- the first external mount instead.
		if preferred == "/root/" .. path_name .. "/autogpt" then
			preferred = candidates[1]
		end
	end

	return candidates, preferred
end
|
||||
|
||||
return autogpt
|
@ -0,0 +1,50 @@
|
||||
--[[
LuCI - Lua Configuration Interface
CBI model for luci-app-autogpt: status section plus one Value option per
setting in /etc/config/autogpt.
]]--

local taskd = require "luci.model.tasks"
local autogpt_model = require "luci.model.autogpt"
local m, s, o

m = taskd.docker_map("autogpt", "autogpt", "/usr/libexec/apps/autogpt/autogpt.sh",
	translate("AutoGPT"),
	translate(" Auto-GPT: An Autonomous GPT-4 Experiment. This program, driven by GPT-4, chains together LLM, to autonomously achieve whatever goal you set.")
	.. translate("Official website:") .. ' <a href=\"https://github.com/Significant-Gravitas/Auto-GPT\" target=\"_blank\">https://github.com/Significant-Gravitas/Auto-GPT</a>')

s = m:section(SimpleSection, translate("Service Status"), translate("autogpt status:"))
s:append(Template("autogpt/status"))

s = m:section(TypedSection, "autogpt", translate("Setup"), translate("Refresh to update settings."))
s.addremove = false
s.anonymous = true

-- Config options with their default values
local defaults = {
	openai = "3BlbkFdfgs5eahdth5a54ey",
	config_path = "./auto_gpt_workspace",
	image_name = "significantgravitas/auto-gpt",
	mem_back = "json_file",
	mem_index = "auto-gpt",
	headless = "true",
	execute = "true",
	chunk_max = "8192",
	smart_llm = "gpt-4",
	fast_llm = "gpt-3.5-turbo",
	fast_token = "4000",
	smart_token = "8000",
	port = "3000",
}

-- FIX: blocks/home were previously referenced as undefined globals, which
-- made find_paths() fail on `#nil`; fetch them from the model explicitly.
local blocks = autogpt_model.blocks()
local home = autogpt_model.home()
local paths, default_path = autogpt_model.find_paths(blocks, home, "Configs")

for option, default_value in pairs(defaults) do
	o = s:option(Value, option, translate(option:gsub("_", " "):gsub("^%l", string.upper)))
	if option == "config_path" then
		-- FIX: the path suggestions used to be attached to whichever option
		-- the unordered pairs() loop happened to create last; bind them to
		-- the config_path option they were meant for.
		for _, val in pairs(paths) do
			o:value(val, val)
		end
		o.default = default_path
	else
		o.default = default_value
	end
	o.rmempty = false
end

return m
|
@ -0,0 +1,31 @@
|
||||
<%
-- Status partial for luci-app-autogpt: shows whether the container is
-- running and, if it is, a button that opens its web UI.
local util = require "luci.util"
local container_status = util.trim(util.exec("/usr/libexec/apps/autogpt/autogpt.sh status"))
local container_install = (string.len(container_status) > 0)
-- NOTE(review): autogpt.sh "status" prints docker's {{.Status}} string
-- (e.g. "Up 2 hours"), so comparing against "running" may never match —
-- confirm against the script's actual output.
local container_running = container_status == "running"
-%>
<div class="cbi-value">
	<label class="cbi-value-title"><%:Status%></label>
	<div class="cbi-value-field">
	<% if container_running then %>
	<button class="cbi-button cbi-button-success" disabled="true"><%:autogpt is running%></button>
	<% else %>
	<button class="cbi-button cbi-button-negative" disabled="true"><%:autogpt is not running%></button>
	<% end %>
	</div>
</div>
<%
if container_running then
	local port=util.trim(util.exec("/usr/libexec/apps/autogpt/autogpt.sh port"))
	-- Fall back to a fixed port when no published port can be determined.
	if port == "" then
		port="5741"
	end
-%>
<div class="cbi-value cbi-value-last">
	<label class="cbi-value-title"> </label>
	<div class="cbi-value-field">
	<input type="button" class="btn cbi-button cbi-button-apply" name="start" value="<%:Open autogpt%>" onclick="window.open('http://'+location.hostname+':<%=port%>/', '_blank')">
	</div>
</div>
<% end %>
|
@ -0,0 +1,15 @@
|
||||
# /etc/config/autogpt - defaults consumed by the LuCI CBI page and by
# /usr/libexec/apps/autogpt/autogpt.sh at install time.
config autogpt
	# NOTE(review): this is a placeholder API-key fragment, not a working
	# key; the user must replace it with a real OpenAI key.
	option 'openai' '3BlbkFdfgs5eahdth5a54ey'
	# Host path mounted into the container at /app/auto_gpt_workspace.
	option 'config_path' './auto_gpt_workspace'
	option 'image_name' 'significantgravitas/auto-gpt'
	# Memory backend and index name passed as container environment.
	option 'mem_back' 'json_file'
	option 'mem_index' 'auto-gpt'
	option 'headless' 'true'
	option 'execute' 'true'
	option 'chunk_max' '8192'
	option 'smart_llm' 'gpt-4'
	option 'fast_llm' 'gpt-3.5-turbo'
	option 'fast_token' '4000'
	option 'smart_token' '8000'
	# Host port published to the container's port 3000.
	option 'port' '3000'
|
@ -0,0 +1,14 @@
|
||||
#!/bin/sh
# Sync the UCI-stored config_path with the volume actually mounted in the
# running autogpt container, treating the container as source of truth.

config_dir=`uci -q get autogpt.@autogpt[0].config_path`

# Host-side path of the container's /var/www/html volume mount.
# NOTE(review): the "/_data /var/www/html local true " pattern looks copied
# from another app's template — confirm it matches autogpt's real mounts.
data_dir=`docker inspect --format '{{.Mounts}}' autogpt | grep -Eom1 '[^ ]+/_data /var/www/html local true ' | cut -d' ' -f1`

# Update UCI only when a mount was found and it differs from the setting.
if [ -n "$data_dir" -a "$data_dir" != "$config_dir" ]; then
	uci -q batch <<-EOF >/dev/null
	set autogpt.@autogpt[0].config_path="$data_dir"
	commit autogpt
	EOF
fi

exit 0
|
@ -0,0 +1,132 @@
|
||||
#!/bin/sh
# autogpt.sh - lifecycle helper for the autogpt docker container.
# Invoked by LuCI/taskd as:
#   autogpt.sh <install|upgrade|rm|start|stop|restart|status|port>

ACTION=${1}
shift 1
|
||||
|
||||
# Resolve the configured docker image name into the global IMAGE_NAME
# (empty when the UCI option is unset; callers apply their own fallback).
get_image() {
	IMAGE_NAME=$(uci get autogpt.@autogpt[0].image_name 2>/dev/null)
}
|
||||
|
||||
# Read settings from /etc/config/autogpt, generate a docker-compose file
# under /opt/docker2/compose/autogpt, build/run the container, and register
# a shortcut-menu entry pointing at the published port.
do_install_detail() {
	local openai=$(uci get autogpt.@autogpt[0].openai 2>/dev/null)
	local config_path=$(uci get autogpt.@autogpt[0].config_path 2>/dev/null)
	local image_name=$(uci get autogpt.@autogpt[0].image_name 2>/dev/null)
	local mem_back=$(uci get autogpt.@autogpt[0].mem_back 2>/dev/null)
	local mem_index=$(uci get autogpt.@autogpt[0].mem_index 2>/dev/null)
	local headless=$(uci get autogpt.@autogpt[0].headless 2>/dev/null)
	local execute=$(uci get autogpt.@autogpt[0].execute 2>/dev/null)
	local chunk_max=$(uci get autogpt.@autogpt[0].chunk_max 2>/dev/null)
	local smart_llm=$(uci get autogpt.@autogpt[0].smart_llm 2>/dev/null)
	local fast_llm=$(uci get autogpt.@autogpt[0].fast_llm 2>/dev/null)
	local fast_token=$(uci get autogpt.@autogpt[0].fast_token 2>/dev/null)
	local smart_token=$(uci get autogpt.@autogpt[0].smart_token 2>/dev/null)
	local port=$(uci get autogpt.@autogpt[0].port 2>/dev/null)

	# (Removed unused GEN_PASS/GEN_PASS2 random-password generation.)

	LAN_IP=$(uci get network.lan.ipaddr)
	LAN_IP="${LAN_IP%/*}"   # strip any CIDR suffix

	# FIX: was `[ -z "$config" ]` — $config is never set, so the install
	# always aborted here. Check the variable actually read above.
	if [ -z "$config_path" ]; then
		echo "config path is empty!"
		exit 1
	fi

	# FIX: fallbacks were literal template placeholders ({port}/{image});
	# use the documented defaults from /etc/config/autogpt instead.
	[ -z "$port" ] && port=3000
	[ -z "$IMAGE_NAME" ] && IMAGE_NAME="significantgravitas/auto-gpt"
	[ -z "$image_name" ] && image_name="$IMAGE_NAME"

	# Recreate the compose directory from scratch (idempotent re-install).
	rm -rf /opt/docker2/compose/autogpt
	mkdir -p /opt/docker2/compose/autogpt

	cat > /opt/docker2/compose/autogpt/docker-compose.yml <<EOF
version: "3.9"

services:
  auto-gpt:
    image: $image_name
    restart: on-failure
    init: true
    stop_grace_period: 1m
    tty: true
    environment:
      - OPENAI_API_KEY=$openai
      - MEMORY_BACKEND=$mem_back
      - MEMORY_INDEX=$mem_index
      - HEADLESS_BROWSER=$headless
      - EXECUTE_LOCAL_COMMANDS=$execute
      - BROWSE_CHUNK_MAX_LENGTH=$chunk_max
      - SMART_LLM_MODEL=$smart_llm
      - FAST_LLM_MODEL=$fast_llm
      - FAST_TOKEN_LIMIT=$fast_token
      - SMART_TOKEN_LIMIT=$smart_token
    volumes:
      - $config_path:/app/auto_gpt_workspace:rw
      - ./data:/app/data:rw
      - ./logs:/app/logs:rw
    ports:
      - $port:3000

networks:
  default:
    name: auto-gpt
EOF

	# FIX: compose was invoked from an arbitrary CWD; run it where the
	# compose file was just written so it is actually found.
	cd /opt/docker2/compose/autogpt || exit 1
	docker compose build auto-gpt
	docker compose run --rm auto-gpt

	# Register a shortcut-menu entry pointing at the web UI.
	uci add shortcutmenu lists
	uci set shortcutmenu.@lists[-1].webname="$IMAGE_NAME"
	uci set shortcutmenu.@lists[-1].weburl="$LAN_IP:$port"
	uci set shortcutmenu.@lists[-1].webpath="/"
	uci commit shortcutmenu
}
|
||||
|
||||
# Print CLI help for the sub-commands handled by the dispatch below.
usage() {
	echo "usage: $0 sub-command"
	echo "where sub-command is one of:"
	echo "  install Install autogpt"
	echo "  upgrade Upgrade autogpt"
	echo "  rm/start/stop/restart Remove/Start/Stop/Restart autogpt"
	echo "  status autogpt status"
	echo "  port autogpt port"
}
|
||||
|
||||
# Dispatch on the requested sub-command. Containers are located by image
# ancestry so renamed containers are still found.
case ${ACTION} in
	"install"|"upgrade")
		get_image
		do_install_detail
		;;
	"rm")
		get_image
		CONTAINER_IDS=$(docker ps -a --filter "ancestor=${IMAGE_NAME}" --format '{{.ID}}')
		echo "Stopping and removing containers..."
		for ID in $CONTAINER_IDS; do
			docker stop "$ID"
			docker rm "$ID"
		done
		docker rmi -f "$IMAGE_NAME"
		;;
	"start"|"stop"|"restart")
		get_image
		CONTAINER_IDS=$(docker ps -a --filter "ancestor=${IMAGE_NAME}" --format '{{.ID}}')
		for ID in $CONTAINER_IDS; do
			docker ${ACTION} "$ID"
		done
		;;
	"status")
		get_image
		# FIX: take only the first name — a multi-line list breaks the
		# name filter, and an empty name filter matches EVERY container.
		CONTAINER_NAMES=$(docker ps -a --filter "ancestor=${IMAGE_NAME}" --format '{{.Names}}' | head -n1)
		[ -n "$CONTAINER_NAMES" ] || exit 0
		docker ps --all -f "name=${CONTAINER_NAMES}" --format '{{.Status}}'
		;;
	"port")
		get_image
		# Same guard as "status": avoid matching every container when none
		# from this image exists.
		CONTAINER_NAMES=$(docker ps -a --filter "ancestor=${IMAGE_NAME}" --format '{{.Names}}' | head -n1)
		[ -n "$CONTAINER_NAMES" ] || exit 0
		docker ps --all -f "name=${CONTAINER_NAMES}" --format '{{.Ports}}' | grep -om1 '0.0.0.0:[0-9]*' | sed 's/0.0.0.0://'
		;;
	*)
		usage
		exit 1
		;;
esac
|
@ -0,0 +1,11 @@
|
||||
{
|
||||
"luci-app-autogpt": {
|
||||
"description": "Grant UCI access for luci-app-autogpt",
|
||||
"read": {
|
||||
"uci": [ "autogpt" ]
|
||||
},
|
||||
"write": {
|
||||
"uci": [ "autogpt" ]
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue