Skip to content

Commit 36d5a08

Browse files
committed
Refactor debug.py for cleaner debugpy handling, enhance logging levels in main.py, and improve error handling in task_list.py; Checkpoint before refactoring summary.py
1 parent cb175fa commit 36d5a08

4 files changed

Lines changed: 36 additions & 19 deletions

File tree

src/ai_chunking.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def chunk_messages(prior_messages, recent_channel_messages, max_tokens=30000, as
3636
logging.debug(f"Number of chunks: {len(chunks)}")
3737
return chunks
3838

39-
39+
# This is Arun's implementation of chunking messages for the OpenAI API.
4040
def group_messages(messages, model):
4141
"""
4242
Groups messages into chunks based on the token limit of the model.

src/debug.py

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -50,15 +50,11 @@ def _watch():
5050

5151
watch_for_debugger_connects()
5252

53-
if True:
54-
if os.getenv("DEBUGPY_WAIT") == "true":
55-
logging.info("🪛 debugpy enabled, now waiting_for_client")
56-
debugpy.wait_for_client() # noqa # NEVER BREAKPOINT HERE
57-
debugpy.breakpoint()
53+
if os.getenv("DEBUGPY_WAIT") == "true":
54+
logging.info("🪛 debugpy enabled, now waiting_for_client")
55+
debugpy.wait_for_client() # noqa # NEVER BREAKPOINT HERE
56+
debugpy.breakpoint()
5857

59-
logging.info("🪛 client attached")
60-
logging.info("Breakpoint should hit here") # ⛔ Set breakpoint here
58+
logging.info("🪛 client attached")
59+
logging.info("Breakpoint should hit here") # ⛔ Set breakpoint here
6160

62-
#if os.getenv("DEBUGPY_WAIT") == "true":
63-
# logging.info("🪛 debugpy enabled, now waiting_for_client")
64-
# debugpy.wait_for_client() # noqa

src/main.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,20 @@
11
import logging
2-
logging.basicConfig(level=logging.INFO)
2+
logging.basicConfig(level=logging.DEBUG)
3+
logging.getLogger('apscheduler').setLevel(logging.INFO)
4+
logging.getLogger('discord').setLevel(logging.INFO)
5+
logging.getLogger('werkzeug').setLevel(logging.INFO)
6+
logging.getLogger('flask').setLevel(logging.INFO)
7+
logging.getLogger('apscheduler.executors.default').setLevel(logging.INFO)
8+
logging.getLogger('apscheduler.executors.pool').setLevel(logging.INFO)
9+
logging.getLogger('apscheduler.scheduler').setLevel(logging.INFO)
10+
logging.getLogger('apscheduler.triggers.cron').setLevel(logging.INFO)
11+
logging.getLogger('apscheduler.triggers.interval').setLevel(logging.INFO)
12+
logging.getLogger('apscheduler.triggers.date').setLevel(logging.INFO)
13+
logging.getLogger('apscheduler.triggers.combining').setLevel(logging.INFO)
14+
logging.getLogger('apscheduler.job').setLevel(logging.INFO)
15+
logging.getLogger('apscheduler.jobstores.memory').setLevel(logging.INFO)
16+
logging.getLogger('apscheduler.jobstores.sqlalchemy').setLevel(logging.INFO)
17+
318

419
logging.info("Starting main.py")
520
import os
@@ -75,7 +90,8 @@ def run_webhook(bot, summary_func):
7590
# Run the Webhook Flask app in a thread to avoid blocking the bot
7691
webhook_server_thread = threading.Thread(target=run_webhook,
7792
args=(bot,
78-
summary_for_webhook)
93+
summary_for_webhook),
94+
name="webhook_server_thread",
7995
)
8096
webhook_server_thread.start()
8197

src/task_list.py

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,8 @@ def perform_job(self, payload):
5858

5959

6060
### WEBHOOK RELATED FUNCTIONS
61+
# This effectively calls ```summary_for_webhook.py:summary_for_webhook```
62+
6163
def call_webhook_with_payload(self, url, payload):
6264
logging.debug(f"Sending {payload} to {url}")
6365
headers = {"Content-Type": "application/json"}
@@ -78,16 +80,19 @@ def perform_job_using_webhook(self, payload):
7880
else:
7981
result = response.text
8082

83+
data = {}
84+
8185
try:
8286
data = json.loads(result)
83-
except json.JSONDecodeError:
84-
logging.error(f"Error decoding JSON: {result}")
85-
8687

87-
if data["message"]:
88-
message = data["message"]
89-
else:
88+
if data["message"]:
89+
message = data["message"]
90+
91+
except json.JSONDecodeError:
92+
message = "Error decoding JSON response - Chances are, we passed too many tokens."
93+
logging.error(message)
9094
message = result
95+
9196

9297
self.store_job_result(payload, message) #TODO: think about name
9398
return message

0 commit comments

Comments
 (0)