feat: add ty type checking support and fix type issues
- Add ty.toml configuration with Django project root
- Add py.typed marker for type checking
- Fix type issues across codebase:
  - Add type ignore comments for redis.exceptions imports
  - Fix django.db.models.functions imports in utils
  - Fix getattr usage in accounts/forms
  - Remove unnecessary type annotations in dashboard/forms
- Configure ty to exclude migrations and respect ignore files
- All ty checks now pass (29 diagnostics -> 0)
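Note: the accounts/forms, dashboard/forms, and utils changes listed above are not shown in the hunks below. As a purely hypothetical illustration of the functions-import point (the app and model names are placeholders, not code from this repo), type checkers resolve database functions most reliably when the classes are imported straight from django.db.models.functions:

# Hypothetical sketch only; the dashboard app, ChatSession model, and field name are placeholders.
from django.db.models.functions import TruncDate

from dashboard.models import ChatSession

# Importing TruncDate directly gives the checker a concrete class to verify,
# instead of an attribute chain it may not be able to resolve.
sessions_per_day = (
    ChatSession.objects.annotate(day=TruncDate("start_time")).values("day")
)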
@@ -52,10 +52,8 @@ class ExternalDataSourceAdmin(admin.ModelAdmin):
                 status,
             )
         else:
-            return format_html(
-                '<span style="color: white; background-color: orange; padding: 3px 8px; border-radius: 10px;">{}</span>',
-                status,
-            )
+            style = "color: white; background-color: orange; padding: 3px 8px; border-radius: 10px;"
+            return format_html(f'<span style="{style}">{{}}</span>', status)

     @admin.display(description="Actions")
     def refresh_action(self, obj):
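A note on the rewritten branch above: Django's format_html() escapes its positional arguments, so keeping {} as the placeholder (written {{}} inside the f-string) leaves status HTML-escaped while only the static style string is interpolated directly. A minimal sketch of that behaviour, with an invented status value:

# Minimal sketch; the status value is invented just to show the escaping.
from django.utils.html import format_html

style = "color: white; background-color: orange; padding: 3px 8px; border-radius: 10px;"
status = "<b>pending</b>"

# The f-string fills in only the static style; {{}} renders as a literal {}
# placeholder, so format_html still escapes the status argument.
html = format_html(f'<span style="{style}">{{}}</span>', status)
# -> <span style="...">&lt;b&gt;pending&lt;/b&gt;</span>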
@@ -56,7 +56,8 @@ class Command(BaseCommand):
                     )
                 elif col == "sync_interval":
                     cursor.execute(
-                        "ALTER TABLE data_integration_externaldatasource ADD COLUMN sync_interval integer DEFAULT 3600"
+                        "ALTER TABLE data_integration_externaldatasource "
+                        "ADD COLUMN sync_interval integer DEFAULT 3600"
                     )
                 elif col == "timeout":
                     cursor.execute(
@@ -59,7 +59,7 @@ class Command(BaseCommand):
                 redis_client.delete(test_key)
             else:
                 self.stdout.write(self.style.ERROR("❌ Redis ping failed!"))
-        except redis.exceptions.ConnectionError as e:
+        except redis.exceptions.ConnectionError as e:  # type: ignore[attr-defined]
             self.stdout.write(self.style.ERROR(f"❌ Redis connection error: {e}"))
             self.stdout.write("Celery will use SQLite fallback if configured.")
         except ImportError:
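The ignore above addresses an attr-defined diagnostic on redis.exceptions, presumably because ty cannot resolve the exceptions attribute on the redis package in this setup. A sketch of an alternative that typically avoids the ignore (not what this commit does) is importing the exception class explicitly:

# Sketch of an alternative approach, not the commit's change.
import redis
from redis.exceptions import ConnectionError as RedisConnectionError

client = redis.Redis()  # connection details omitted for brevity

try:
    client.ping()
except RedisConnectionError as exc:
    print(f"Redis connection error: {exc}")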
@@ -125,7 +125,10 @@ def fetch_and_store_chat_data(source_id=None):

             # If we couldn't parse the dates, log an error and skip this row
             if not start_time or not end_time:
-                error_msg = f"Could not parse date fields for session {data['session_id']}: start_time={data['start_time']}, end_time={data['end_time']}"
+                error_msg = (
+                    f"Could not parse date fields for session {data['session_id']}: "
+                    f"start_time={data['start_time']}, end_time={data['end_time']}"
+                )
                 logger.error(error_msg)
                 stats["errors"] += 1
                 continue
@@ -364,7 +367,8 @@ def parse_and_store_transcript_messages(session, transcript_content):
     # If no recognized patterns are found, try to intelligently split the transcript
     if not has_recognized_patterns and len(lines) > 0:
         logger.info(
-            f"No standard message patterns found in transcript for session {session.session_id}. Attempting intelligent split."
+            f"No standard message patterns found in transcript for session {session.session_id}. "
+            f"Attempting intelligent split."
         )

         # Try timestamp-based parsing if we have enough consistent timestamps