From ba7994e91ff4513549569531309ea35d7c0ac133 Mon Sep 17 00:00:00 2001
From: Ben
Date: Fri, 8 Sep 2023 11:48:08 +0000
Subject: [PATCH] first commit

---
 Makefile                                      |  16 +++
 README.md                                     |   0
 luasrc/controller/autogpt.lua                 |  15 ++
 luasrc/model/autogpt.lua                      |  55 ++++++++
 luasrc/model/cbi/autogpt.lua                  |  50 +++++++
 luasrc/view/autogpt/status.htm                |  31 ++++
 root/etc/config/autogpt                       |  15 ++
 root/etc/uci-defaults/luci-app-autogpt        |  14 ++
 root/usr/libexec/apps/autogpt/autogpt.sh      | 132 ++++++++++++++++++
 .../share/rpcd/acl.d/luci-app-autogpt.json    |  11 ++
 10 files changed, 339 insertions(+)
 create mode 100644 Makefile
 create mode 100644 README.md
 create mode 100644 luasrc/controller/autogpt.lua
 create mode 100644 luasrc/model/autogpt.lua
 create mode 100644 luasrc/model/cbi/autogpt.lua
 create mode 100644 luasrc/view/autogpt/status.htm
 create mode 100644 root/etc/config/autogpt
 create mode 100644 root/etc/uci-defaults/luci-app-autogpt
 create mode 100755 root/usr/libexec/apps/autogpt/autogpt.sh
 create mode 100644 root/usr/share/rpcd/acl.d/luci-app-autogpt.json

diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..78873b4
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,16 @@
+
+
+include $(TOPDIR)/rules.mk
+
+
+LUCI_TITLE:=LuCI support for autogpt
+LUCI_PKGARCH:=all
+LUCI_DEPENDS:=+lsblk +docker +luci-lib-taskd
+
+define Package/luci-app-autogpt/conffiles
+/etc/config/autogpt
+endef
+
+include $(TOPDIR)/feeds/luci/luci.mk
+
+# call BuildPackage - OpenWrt buildroot signature
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e69de29
diff --git a/luasrc/controller/autogpt.lua b/luasrc/controller/autogpt.lua
new file mode 100644
index 0000000..04e6c47
--- /dev/null
+++ b/luasrc/controller/autogpt.lua
@@ -0,0 +1,15 @@
+
+module("luci.controller.autogpt", package.seeall)
+
+function index()
+	entry({"admin", "apps"}, firstchild(), _("Apps"), 45).dependent = false
+	if not nixio.fs.access("/etc/config/autogpt") then
+		return
+	end
+
+	local page = entry({"admin", "apps", "autogpt"}, cbi("autogpt"), _("autogpt"))
+	page.order = 10
+	page.dependent = true
+	page.acl_depends = { "luci-app-autogpt" }
+	entry({"admin", "apps", "autogpt", "status"}, call("act_status")).leaf = true
+end
diff --git a/luasrc/model/autogpt.lua b/luasrc/model/autogpt.lua
new file mode 100644
index 0000000..491e6ff
--- /dev/null
+++ b/luasrc/model/autogpt.lua
@@ -0,0 +1,55 @@
+local util = require "luci.util"
+local jsonc = require "luci.jsonc"
+
+local autogpt = {}
+
+autogpt.blocks = function()
+	local f = io.popen("lsblk -s -f -b -o NAME,FSSIZE,MOUNTPOINT --json", "r")
+	local vals = {}
+	if f then
+		local ret = f:read("*all")
+		f:close()
+		local obj = jsonc.parse(ret)
+		for _, val in pairs(obj["blockdevices"]) do
+			local fsize = val["fssize"]
+			if fsize ~= nil and string.len(fsize) > 10 and val["mountpoint"] then
+				-- fsize > 1G
+				vals[#vals+1] = val["mountpoint"]
+			end
+		end
+	end
+	return vals
+end
+
+autogpt.home = function()
+	local uci = require "luci.model.uci".cursor()
+	local home_dirs = {}
+	home_dirs["main_dir"] = uci:get_first("quickstart", "main", "main_dir", "/root")
+	home_dirs["Configs"] = uci:get_first("quickstart", "main", "conf_dir", home_dirs["main_dir"].."/Configs")
+	home_dirs["Public"] = uci:get_first("quickstart", "main", "pub_dir", home_dirs["main_dir"].."/Public")
+	home_dirs["Downloads"] = uci:get_first("quickstart", "main", "dl_dir", home_dirs["Public"].."/Downloads")
+	home_dirs["Caches"] = uci:get_first("quickstart", "main", "tmp_dir", home_dirs["main_dir"].."/Caches")
+	return home_dirs
+end
+
+autogpt.find_paths = function(blocks, home_dirs, path_name)
+	local default_path = ''
+	local configs = {}
+
+	default_path = home_dirs[path_name] .. "/autogpt"
+	if #blocks == 0 then
+		table.insert(configs, default_path)
+	else
+		for _, val in pairs(blocks) do
+			table.insert(configs, val .. "/" .. path_name .. "/autogpt")
+		end
+		local without_conf_dir = "/root/" .. path_name .. "/autogpt"
+		if default_path == without_conf_dir then
+			default_path = configs[1]
+		end
+	end
+
+	return configs, default_path
+end
+
+return autogpt
diff --git a/luasrc/model/cbi/autogpt.lua b/luasrc/model/cbi/autogpt.lua
new file mode 100644
index 0000000..01a0f7e
--- /dev/null
+++ b/luasrc/model/cbi/autogpt.lua
@@ -0,0 +1,50 @@
+--[[
+LuCI - Lua Configuration Interface
+]]--
+
+local taskd = require "luci.model.tasks"
+local autogpt_model = require "luci.model.autogpt"
+local m, s, o
+
+m = taskd.docker_map("autogpt", "autogpt", "/usr/libexec/apps/autogpt/autogpt.sh",
+	translate("AutoGPT"),
+	translate("Auto-GPT: An Autonomous GPT-4 Experiment. This program, driven by GPT-4, chains together LLM thoughts to autonomously achieve whatever goal you set. ")
+	.. translate("Official website:") .. ' https://github.com/Significant-Gravitas/Auto-GPT')
+
+s = m:section(SimpleSection, translate("Service Status"), translate("autogpt status:"))
+s:append(Template("autogpt/status"))
+
+s = m:section(TypedSection, "autogpt", translate("Setup"), translate("Refresh to update settings."))
+s.addremove = false
+s.anonymous = true
+
+-- Config options with their default values
+local defaults = {
+	openai = "3BlbkFdfgs5eahdth5a54ey",
+	config_path = "./auto_gpt_workspace",
+	image_name = "significantgravitas/auto-gpt",
+	mem_back = "json_file",
+	mem_index = "auto-gpt",
+	headless = "true",
+	execute = "true",
+	chunk_max = "8192",
+	smart_llm = "gpt-4",
+	fast_llm = "gpt-3.5-turbo",
+	fast_token = "4000",
+	smart_token = "8000",
+	port = "3000",
+}
+
+local config_path_opt
+for option, default_value in pairs(defaults) do
+	o = s:option(Value, option, translate(option:gsub("_", " "):gsub("^%l", string.upper)))
+	o.default = default_value
+	o.rmempty = false
+	if option == "config_path" then
+		config_path_opt = o
+	end
+end
+
+-- Offer the mount points discovered by the model as choices for config_path
+local blocks = autogpt_model.blocks()
+local home = autogpt_model.home()
+local paths, default_path = autogpt_model.find_paths(blocks, home, "Configs")
+for _, val in pairs(paths) do
+	config_path_opt:value(val, val)
+end
+config_path_opt.default = default_path
+
+return m
diff --git a/luasrc/view/autogpt/status.htm b/luasrc/view/autogpt/status.htm
new file mode 100644
index 0000000..ce689b7
--- /dev/null
+++ b/luasrc/view/autogpt/status.htm
@@ -0,0 +1,31 @@
+<%
+local util = require "luci.util"
+local container_status = util.trim(util.exec("/usr/libexec/apps/autogpt/autogpt.sh status"))
+local container_install = (string.len(container_status) > 0)
+local container_running = container_status == "running"
+-%>
+<div class="cbi-value">
+	<label class="cbi-value-title"><%:Status%></label>
+	<div class="cbi-value-field">
+	<% if container_running then %>
+		<button class="cbi-button cbi-button-success" disabled="true"><%:AutoGPT is running%></button>
+	<% else %>
+		<button class="cbi-button cbi-button-negative" disabled="true"><%:AutoGPT is not running%></button>
+	<% end %>
+	</div>
+</div>
+<%
+if container_running then
+	local port=util.trim(util.exec("/usr/libexec/apps/autogpt/autogpt.sh port"))
+	if port == "" then
+		port="5741"
+	end
+-%>
+<div class="cbi-value">
+	<div class="cbi-value-field">
+		<a href="http://<%=luci.http.getenv("SERVER_NAME")%>:<%=port%>" target="_blank">
+			<button class="cbi-button cbi-button-apply"><%:Open AutoGPT%></button>
+		</a>
+	</div>
+</div>
+<% end %>
diff --git a/root/etc/config/autogpt b/root/etc/config/autogpt
new file mode 100644
index 0000000..8a86fe2
--- /dev/null
+++ b/root/etc/config/autogpt
@@ -0,0 +1,15 @@
+config autogpt
+	option 'openai' '3BlbkFdfgs5eahdth5a54ey'
+	option 'config_path' './auto_gpt_workspace'
+	option 'image_name' 'significantgravitas/auto-gpt'
+	option 'mem_back' 'json_file'
+	option 'mem_index' 'auto-gpt'
+	option 'headless' 'true'
+	option 'execute' 'true'
+	option 'chunk_max' '8192'
+	option 'smart_llm' 'gpt-4'
+	option 'fast_llm' 'gpt-3.5-turbo'
+	option 'fast_token' '4000'
+	option 'smart_token' '8000'
+	option 'port' '3000'
+
diff --git a/root/etc/uci-defaults/luci-app-autogpt b/root/etc/uci-defaults/luci-app-autogpt
new file mode 100644
index 0000000..3cf2322
--- /dev/null
+++ b/root/etc/uci-defaults/luci-app-autogpt
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+config_dir=`uci -q get autogpt.@autogpt[0].config_path`
+
+data_dir=`docker inspect --format '{{.Mounts}}' autogpt | grep -Eom1 '[^ ]+/_data /var/www/html local true ' | cut -d' ' -f1`
+
+if [ -n "$data_dir" -a "$data_dir" != "$config_dir" ]; then
+	uci -q batch <<-EOF >/dev/null
+	set autogpt.@autogpt[0].config_path="$data_dir"
+	commit autogpt
+EOF
+fi
+
+exit 0
diff --git a/root/usr/libexec/apps/autogpt/autogpt.sh b/root/usr/libexec/apps/autogpt/autogpt.sh
new file mode 100755
index 0000000..063cdfd
--- /dev/null
+++ b/root/usr/libexec/apps/autogpt/autogpt.sh
@@ -0,0 +1,132 @@
+#!/bin/sh
+
+ACTION=${1}
+shift 1
+
+get_image() {
+	IMAGE_NAME=$(uci get autogpt.@autogpt[0].image_name 2>/dev/null)
+}
+
+do_install_detail() {
+	local openai=`uci get autogpt.@autogpt[0].openai 2>/dev/null`
+	local config_path=`uci get autogpt.@autogpt[0].config_path 2>/dev/null`
+	local image_name=`uci get autogpt.@autogpt[0].image_name 2>/dev/null`
+	local mem_back=`uci get autogpt.@autogpt[0].mem_back 2>/dev/null`
+	local mem_index=`uci get autogpt.@autogpt[0].mem_index 2>/dev/null`
+	local headless=`uci get autogpt.@autogpt[0].headless 2>/dev/null`
+	local execute=`uci get autogpt.@autogpt[0].execute 2>/dev/null`
+	local chunk_max=`uci get autogpt.@autogpt[0].chunk_max 2>/dev/null`
+	local smart_llm=`uci get autogpt.@autogpt[0].smart_llm 2>/dev/null`
+	local fast_llm=`uci get autogpt.@autogpt[0].fast_llm 2>/dev/null`
+	local fast_token=`uci get autogpt.@autogpt[0].fast_token 2>/dev/null`
+	local smart_token=`uci get autogpt.@autogpt[0].smart_token 2>/dev/null`
+	local port=`uci get autogpt.@autogpt[0].port 2>/dev/null`
+
+	GEN_PASS=$(< /dev/urandom tr -dc A-Za-z0-9 2>/dev/null | head -c14; echo)
+	GEN_PASS2=$(< /dev/urandom tr -dc A-Za-z0-9 2>/dev/null | head -c14; echo)
+
+	LAN_IP=$(uci get network.lan.ipaddr)
+	LAN_IP="${LAN_IP%/*}"
+
+	if [ -z "$config_path" ]; then
+		echo "config path is empty!"
+		exit 1
+	fi
+
+	# Fall back to the defaults from /etc/config/autogpt
+	[ -z "$port" ] && port=3000
+	[ -z "$IMAGE_NAME" ] && IMAGE_NAME="significantgravitas/auto-gpt"
+
+	rm -Rf /opt/docker2/compose/autogpt
+	mkdir -p /opt/docker2/compose/autogpt
+
+	touch /opt/docker2/compose/autogpt/docker-compose.yml
+	cat > /opt/docker2/compose/autogpt/docker-compose.yml <