diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..42541cc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+.venv
+token.yaml
+ollama-api.yaml
\ No newline at end of file
diff --git a/CloudGPT.openrc b/CloudGPT.openrc
new file mode 100644
index 0000000..422d15b
--- /dev/null
+++ b/CloudGPT.openrc
@@ -0,0 +1,10 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+command="/app/bin/run.sh"
+command_background=true
+pidfile="/run/$RC_SVCNAME"
+
+depend() {
+    need net
+}
diff --git a/__pycache__/discord.cpython-313.pyc b/__pycache__/discord.cpython-313.pyc
new file mode 100644
index 0000000..689ff21
Binary files /dev/null and b/__pycache__/discord.cpython-313.pyc differ
diff --git a/__pycache__/skills.cpython-313.pyc b/__pycache__/skills.cpython-313.pyc
new file mode 100644
index 0000000..d69e82a
Binary files /dev/null and b/__pycache__/skills.cpython-313.pyc differ
diff --git a/bin/CloudGPT.py b/bin/CloudGPT.py
new file mode 100644
index 0000000..dbeec08
--- /dev/null
+++ b/bin/CloudGPT.py
@@ -0,0 +1,57 @@
+import discord
+from discord.ext import commands
+import skills
+import asyncio
+import yaml
+
+with open("token.yaml", "r") as yamlfile:
+    data = yaml.safe_load(yamlfile)
+
+# -----------------------------
+# Discord Intents
+# -----------------------------
+intents = discord.Intents.default()
+intents.message_content = True
+
+bot = commands.Bot(command_prefix="!", intents=intents)
+
+
+# -----------------------------
+# Events
+# -----------------------------
+@bot.event
+async def on_ready():
+    print(f"Logged in as {bot.user} ✅")
+
+@bot.event
+async def on_message(message):
+    # Ignore the bot's own messages
+    if message.author == bot.user:
+        return
+
+    try:
+        # If the message is a command, only run command processing
+        if message.content.startswith(bot.command_prefix):
+            await bot.process_commands(message)
+            return
+
+        # Everything else → skills.process_text
+        loop = asyncio.get_running_loop()
+        result = await loop.run_in_executor(None, skills.process_text, message.content)
+
+        # Send longer output as a code block
+        if len(result) > 1900:
+            await message.channel.send(f"```\n{result}\n```")
+        else:
+            await message.channel.send(result)
+
+    except Exception as e:
+        print(f"Error in on_message: {e}")
+        await message.channel.send(f"⚠️ Error: {e}")
+
+
+# -----------------------------
+# Start the bot
+# -----------------------------
+
+bot.run(data[0]['token'])
diff --git a/bin/__pycache__/skills.cpython-313.pyc b/bin/__pycache__/skills.cpython-313.pyc
new file mode 100644
index 0000000..3cb8761
Binary files /dev/null and b/bin/__pycache__/skills.cpython-313.pyc differ
diff --git a/bin/run.sh b/bin/run.sh
new file mode 100644
index 0000000..3f68870
--- /dev/null
+++ b/bin/run.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+git -C /app pull
+
+if [ ! -d /app/.venv ];
+then
+    logger "generating python virtual-env"
+    python3 -m venv /app/.venv
+    echo '.venv' >> /app/.gitignore
+fi
+
+. /app/.venv/bin/activate
+
+if [ -f /app/requirements.txt ];
+then
+    logger "updating python packages"
+    pip3 install -U -r /app/requirements.txt
+fi
+
+logger "starting CloudGPT"
+cd /app || exit 1
+python3 /app/bin/CloudGPT.py 2>&1 | while IFS= read -r line; do logger "$line"; done
diff --git a/bin/skills.py b/bin/skills.py
new file mode 100644
index 0000000..43afabb
--- /dev/null
+++ b/bin/skills.py
@@ -0,0 +1,48 @@
+from ollama import Client
+import yaml
+
+with open("ollama-api.yaml", "r") as yamlfile:
+    data = yaml.safe_load(yamlfile)
+
+# -----------------------------
+# Ollama Client
+# -----------------------------
+
+client = Client(
+    host="https://ollama.com",
+    headers={"Authorization": f"Bearer {data[0]['token']}"}
+)
+
+
+# -----------------------------
+# Ollama function
+# -----------------------------
+def ask_ollama(prompt):
+    """GPT via the Ollama Python client"""
+    try:
+        messages = [{"role": "user", "content": prompt}]
+        response = client.chat("gpt-oss:120b-cloud", messages=messages, stream=False)
+
+        # Safety check: make sure the key exists
+        if "message" in response and "content" in response["message"]:
+            return response["message"]["content"]
+        elif "content" in response:
+            return response["content"]
+        else:
+            return "⚠️ Ollama did not return a response."
+    except Exception as e:
+        return f"Error talking to GPT-oss: {str(e)}"
+
+# -----------------------------
+# Main text-processing function
+# -----------------------------
+def process_text(command):
+    print("Command received:", command)
+    return ask_ollama(command)
+
+# -----------------------------
+# Local test
+# -----------------------------
+if __name__ == "__main__":
+    user_input = input("Enter your command: ")
+    print(process_text(user_input))
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..2d0bad1
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+discord.py
+pyyaml
+ollama
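
Note on configuration: token.yaml and ollama-api.yaml are gitignored and not part of this diff. Both CloudGPT.py and skills.py read them with yaml.safe_load and take the credential from data[0]['token'], so each file is presumably a YAML list containing a single mapping with a token key. A minimal sketch with placeholder values (the real values are assumptions, not part of this change):

    # token.yaml — Discord bot token (placeholder)
    - token: "DISCORD_BOT_TOKEN_HERE"

    # ollama-api.yaml — Ollama cloud API key (placeholder)
    - token: "OLLAMA_API_KEY_HERE"

Both files live in /app (the repository root), which is why run.sh changes into /app before starting CloudGPT.py.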