| author | Abdellah El Morabit <nsrddyn@gmail.com> | 2024-11-14 19:31:39 +0100 |
|---|---|---|
| committer | Abdellah El Morabit <nsrddyn@gmail.com> | 2024-11-14 19:31:39 +0100 |
| commit | ec2afe02ea46b20a4d3c931f58731e8a6fbd6b46 (patch) | |
| tree | 4f12355a857c0ecafb75932ded7cbf9def79eb35 | |
| parent | fe441f69a4632e5245588574923ef8dc467eced5 (diff) | |
removing ollama; it was a bad idea
| -rw-r--r-- | bin/source.py | 22 |
1 file changed, 0 insertions, 22 deletions
diff --git a/bin/source.py b/bin/source.py
index c232351..f30c066 100644
--- a/bin/source.py
+++ b/bin/source.py
@@ -1,6 +1,5 @@
 import time
 import os
-import ollama
 import speech_recognition as speech
 import i2c as LCD
 from gpiozero import CPUTemperature
@@ -69,30 +68,9 @@ def save_notes():
         if OUTPUT == i:
             PRINT_REQUEST == False
 
-def lama():
-    QUESTION_REQUEST = True
-    while QUESTION_REQUEST == True:
-        QUESTION_REQUEST = input
-        ("do you want to ask a question") == 'yes'
-        if QUESTION_REQUEST == True:
-            USER_QUESTION = input()
-            response = lama.chat(
-                model="llama3.2",
-                messages=[
-                    {
-                        "role": "user",
-                        "content": USER_QUESTION,
-                    },
-                ],
-            )
-            print(response["messages"]["content"])
-        else:
-            break
-
 
 
 OPTIONS = {
-    "LAMA": ollama(),
     "CPU_INFO": display_cpu_info(),
     "UPTIME": display_uptime(),
     "SPEECH_TRANSCRIBER": recognize_speech(),
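For reference, the removed `lama()` helper could not have worked as committed: the `input` call was split across two lines, so `QUESTION_REQUEST` ended up bound to the `input` builtin rather than the user's answer; `lama.chat` referenced the enclosing function instead of the `ollama` module; and the reply was read from `response["messages"]` instead of `response["message"]`. Below is a minimal sketch of what the helper was apparently aiming for, assuming the `ollama` Python package and a locally pulled `llama3.2` model; the function name `ask_llama` is illustrative, not from the repository.

```python
import ollama


def ask_llama():
    """Ask llama3.2 questions in a loop until the user answers anything but 'yes'."""
    while input("do you want to ask a question? ") == "yes":
        user_question = input("question: ")
        response = ollama.chat(
            model="llama3.2",
            messages=[{"role": "user", "content": user_question}],
        )
        # The reply text lives under "message" (singular), not "messages".
        print(response["message"]["content"])
```

A related point visible in the surviving hunk: the `OPTIONS` entries such as `"CPU_INFO": display_cpu_info()` call each handler once while the dict is being built, and the removed `"LAMA": ollama()` entry tried to call the imported module itself; storing the bare function objects and invoking them on dispatch is probably what was intended.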
