first upload
3  .gitignore  vendored  Normal file
@@ -0,0 +1,3 @@
.venv
token.yaml
ollama-api.yaml
10  CloudGPT.openrc  Normal file
@@ -0,0 +1,10 @@
#!/sbin/openrc-run

name="$RC_SVCNAME"
command="/app/bin/run.sh"
command_background=true
pidfile="/run/$RC_SVCNAME"

depend() {
    need net
}
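Presumably this unit is installed as /etc/init.d/CloudGPT on the OpenRC host; a minimal sketch of enabling and starting it with standard OpenRC commands (the service name is an assumption, none of this is part of the commit):

rc-update add CloudGPT default   # start the service at boot
rc-service CloudGPT start        # start it now
rc-service CloudGPT status       # check that it is running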
BIN  __pycache__/discord.cpython-313.pyc  Normal file
Binary file not shown.

BIN  __pycache__/skills.cpython-313.pyc  Normal file
Binary file not shown.
57  bin/CloudGPT.py  Normal file
@@ -0,0 +1,57 @@
import discord
from discord.ext import commands
import skills as skills
import asyncio
import yaml

with open("token.yaml", "r") as yamlfile:
    data = yaml.load(yamlfile, Loader=yaml.FullLoader)

# -----------------------------
# Discord Intents
# -----------------------------
intents = discord.Intents.default()
intents.message_content = True

bot = commands.Bot(command_prefix="!", intents=intents)


# -----------------------------
# Events
# -----------------------------
@bot.event
async def on_ready():
    print(f"Logged in as {bot.user} ✅")

@bot.event
async def on_message(message):
    # The bot ignores its own messages
    if message.author == bot.user:
        return

    try:
        # If the message is a command, only run command processing
        if message.content.startswith(bot.command_prefix):
            await bot.process_commands(message)
            return

        # Everything else goes to skills.process_text
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(None, skills.process_text, message.content)

        # Send longer output as a code block
        if len(result) > 1900:
            await message.channel.send(f"```\n{result}\n```")
        else:
            await message.channel.send(result)

    except Exception as e:
        print(f"Error in on_message: {e}")
        await message.channel.send(f"⚠️ Error: {e}")


# -----------------------------
# Start the bot
# -----------------------------

bot.run(data[0]['token'])
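token.yaml is deliberately gitignored and therefore not in this upload. A minimal sketch of the layout the script above expects, inferred from data[0]['token'] (a YAML list whose first entry carries the Discord bot token); the value is a placeholder:

cat > /app/token.yaml <<'EOF'
# hypothetical example, shape inferred from bin/CloudGPT.py
- token: "YOUR_DISCORD_BOT_TOKEN"
EOF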
BIN  bin/__pycache__/skills.cpython-313.pyc  Normal file
Binary file not shown.
21  bin/run.sh  Normal file
@@ -0,0 +1,21 @@
#!/bin/sh

git -C /app pull

if [ ! -d /app/.venv ];
then
    logger "generating python virtual-env"
    python3 -m venv /app/.venv
    echo '.venv' >> /app/.gitignore
fi

. /app/.venv/bin/activate

if [ -f /app/requirements.txt ];
then
    logger "updating python packages"
    pip3 install -U -r /app/requirements.txt
fi

logger "starting CloudGPT"
python3 /app/bin/CloudGPT.py 2>&1 | while IFS= read -r line; do logger "$line"; done
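The OpenRC unit runs /app/bin/run.sh directly, so the script has to be executable; an assumed one-time setup and log check (on Alpine with busybox syslog the logger lines usually land in /var/log/messages):

chmod +x /app/bin/run.sh     # required because command= points at the script itself
tail -f /var/log/messages    # follow the output that run.sh pipes into logger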
50  bin/skills.py  Normal file
@@ -0,0 +1,50 @@
import os
from ollama import Client
import yaml

with open("ollama-api.yaml", "r") as yamlfile:
    data = yaml.safe_load(yamlfile)

# -----------------------------
# Ollama Client
# -----------------------------

client = Client(
    host="https://ollama.com",
    headers={"Authorization": f"Bearer {data[0]['token']}"}
)


# -----------------------------
# Ollama function
# -----------------------------
def ask_ollama(prompt):
    """GPT via the Ollama Python client"""
    try:
        messages = [{"role": "user", "content": prompt}]
        response = client.chat("gpt-oss:120b-cloud", messages=messages, stream=False)

        # Safety check: make sure the expected keys exist
        if "message" in response and "content" in response["message"]:
            return response["message"]["content"]
        elif "content" in response:
            return response["content"]
        else:
            return "⚠️ Ollama returned no answer."
    except Exception as e:
        return f"Error talking to GPT-oss: {str(e)}"

# -----------------------------
# Main text-processing entry point
# -----------------------------
def process_text(command):
    command_lower = command.lower()
    print("Command received:", command)
    return ask_ollama(command)

# -----------------------------
# Local test
# -----------------------------
if __name__ == "__main__":
    user_input = input("Enter your command: ")
    print(process_text(user_input))
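ollama-api.yaml is likewise gitignored; a minimal sketch of the expected layout, inferred from data[0]['token'] above (the first list entry carries the Ollama cloud API key), placeholder value only:

cat > /app/ollama-api.yaml <<'EOF'
# hypothetical example, shape inferred from bin/skills.py
- token: "YOUR_OLLAMA_API_KEY"
EOF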
4  requirements.txt  Normal file
@@ -0,0 +1,4 @@
discord
pyyaml
ollama
asyncio