Mirror of https://github.com/kjanat/livedash-node.git, synced 2026-02-13 20:15:46 +01:00

Compare commits: `development` ... `bb078b4d6a` (7 commits)

| SHA1 |
|---|
| bb078b4d6a |
| 33577bb2d5 |
| adea8ae6b7 |
| ef8601dd72 |
| 5aaca6de99 |
| 71c8aff125 |
| 0c18e8be57 |
```diff
@@ -6,8 +6,4 @@ NEXTAUTH_URL=http://192.168.1.2:3000
 NEXTAUTH_SECRET=this_is_a_fixed_secret_for_development_only
 NODE_ENV=development

 # OpenAI API key for session processing
 # Add your API key here: OPENAI_API_KEY=sk-...
 OPENAI_API_KEY=your_openai_api_key_here

 # Database connection - already configured in your prisma/schema.prisma
```
```diff
@@ -1,20 +0,0 @@
-{
-  "mcpServers": {
-    "sqlite": {
-      "command": "uvx",
-      "args": [
-        "mcp-server-sqlite",
-        "--db-path",
-        "./prisma/dev.db"
-      ]
-    },
-    "filesystem": {
-      "command": "npx",
-      "args": [
-        "-y",
-        "@modelcontextprotocol/server-filesystem",
-        "D:\\Notso\\Product\\Vibe-coding\\livedash-node"
-      ]
-    }
-  }
-}
```
.github/workflows/playwright.yml (vendored, 2 lines changed)

```diff
@@ -1,4 +1,6 @@
 name: Playwright Tests
+permissions:
+  contents: read
 on:
   push:
     branches: [main, master]
```
.gitignore (vendored, 186 lines added)

```diff
@@ -261,3 +261,189 @@ Thumbs.db
 /playwright-report/
 /blob-report/
 /playwright/.cache/
+# Created by https://www.toptal.com/developers/gitignore/api/node,macos
+# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos
+
+### macOS ###
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### Node ###
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Moved from ./templates for ignoring all locks in templates
+templates/**/*-lock.*
+templates/**/*.lock
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+### Node Patch ###
+# Serverless Webpack directories
+.webpack/
+
+# Optional stylelint cache
+
+# SvelteKit build / generate output
+.svelte-kit
+
+# End of https://www.toptal.com/developers/gitignore/api/node,macos
+
+# Wrangler output
+.wrangler/
+build/
+
+# Turbo output
+.turbo/
+
+.dev.vars*
+test-transcript-format.js
```
.prettierignore (new file, 54 lines)

```diff
@@ -0,0 +1,54 @@
+# Dependencies
+node_modules/
+.pnpm-store/
+
+# Build outputs
+.next/
+out/
+dist/
+build/
+
+# Environment files
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# Logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+
+# Database
+*.db
+*.sqlite
+prisma/migrations/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Git
+.git/
+
+# Coverage reports
+coverage/
+
+# Playwright
+test-results/
+playwright-report/
+playwright/.cache/
+
+# Generated files
+*.generated.*
+
+pnpm-lock.yaml
```
GEMINI.md (deleted, 47 lines)

````diff
@@ -1,47 +0,0 @@
-# Project Overview
-
-This project is a Next.js application with a Node.js backend, designed to provide a live dashboard for data visualization and session management.
-
-## Setup
-
-To set up the project, follow these steps:
-
-1. **Install Dependencies:**
-   ```bash
-   npm install
-   ```
-
-2. **Environment Variables:**
-   Create a `.env` file based on `.env.example` and fill in the necessary environment variables.
-
-3. **Database Setup:**
-   Run database migrations:
-   ```bash
-   npx prisma migrate dev
-   ```
-   Seed the database (optional):
-   ```bash
-   npx prisma db seed
-   ```
-
-4. **Run Development Server:**
-   ```bash
-   npm run dev
-   ```
-
-## Common Commands
-
-- **Run Tests:**
-  ```bash
-  npm test
-  ```
-
-- **Run Linter:**
-  ```bash
-  npm run lint
-  ```
-
-- **Build Project:**
-  ```bash
-  npm run build
-  ```
````
TODO.md (128 lines changed; the old phased refactor plan is replaced by a categorized checklist)

```diff
@@ -1,78 +1,108 @@
 # TODO.md

-# Refactor!!!
+## Dashboard Integration

-> Based on my analysis of the codebase, here is a plan with recommendations for improving the project. The focus is on enhancing standardization, abstraction, user experience, and visual design.
+- [ ] **Resolve `GeographicMap.tsx` and `ResponseTimeDistribution.tsx` data simulation**
+  - Investigate integrating real data sources with server-side analytics
+  - Replace simulated data mentioned in `docs/dashboard-components.md`

-## High-Level Recommendations
+## Component Specific

-The project has a solid foundation, but it could be significantly improved by focusing on three key areas:
+- [ ] **Implement robust emailing of temporary passwords**

-1. Adopt a UI Component Library: While Tailwind CSS is excellent for styling, using a component library like ShadCN/UI or Headless UI would provide pre-built, accessible, and visually consistent components, saving development time and improving the user experience.
-2. Refactor for Next.js App Router: The project currently uses a mix of the pages and app directories. Migrating fully to the App Router would simplify the project structure, improve performance, and align with the latest Next.js features.
-3. Enhance User Experience: Implementing consistent loading and error states, improving responsiveness, and providing better user feedback would make the application more robust and user-friendly.
+  - File: `pages/api/dashboard/users.ts`
+  - Set up proper email service integration

-## Detailed Improvement Plan
+- [x] **Session page improvements** ✅
+  - File: `app/dashboard/sessions/page.tsx`
+  - Implemented pagination, advanced filtering, and sorting

-Here is a phased plan to implement these recommendations:
+## File Cleanup

-### Phase 1: Foundational Improvements (Standardization & Abstraction)
+- [x] **Remove backup files** ✅
+  - Reviewed and removed `.bak` and `.new` files after integration
+  - Cleaned up `GeographicMap.tsx.bak`, `SessionDetails.tsx.bak`, `SessionDetails.tsx.new`

-This phase focuses on cleaning up the codebase, standardizing the project structure, and improving the abstraction of core functionalities.
+## Database Schema Improvements

-1. Standardize Project Structure:
+- [ ] **Update EndTime field**

-   - [x] Unify Server File: Consolidated server.js, server.mjs, and server.ts into a single server.ts file to remove redundancy. ✅
-   - [x] Migrate to App Router: All API routes moved from `pages/api` to `app/api`. ✅
-   - [x] Standardize Naming Conventions: All files and components already follow a consistent naming convention (e.g., PascalCase for components, kebab-case for files). ✅
+  - Make `endTime` field nullable in Prisma schema to match TypeScript interfaces

-2. Introduce a UI Component Library:
+- [ ] **Add database indices**

-   - Integrate ShadCN/UI: Add ShadCN/UI to the project to leverage its extensive library of accessible and customizable components.
-   - Replace Custom Components: Gradually replace custom-built components in the components/ directory with their ShadCN/UI equivalents. This will improve visual consistency and reduce maintenance overhead.
+  - Add appropriate indices to improve query performance
+  - Focus on dashboard metrics and session listing queries

-3. Refactor Core Logic:
-   - Centralize Data Fetching: Create a dedicated module (e.g., lib/data-service.ts) to handle all data fetching logic, abstracting away the details of using Prisma and external APIs.
-   - Isolate Business Logic: Ensure that business logic (e.g., session processing, metric calculation) is separated from the API routes and UI components.
+- [ ] **Implement production email service**
+  - Replace console logging in `lib/sendEmail.ts`
+  - Consider providers: Nodemailer, SendGrid, AWS SES

-### Phase 2: UX and Visual Enhancements
+## General Enhancements & Features

-This phase focuses on improving the user-facing aspects of the application.
+- [ ] **Real-time updates**

-1. Implement Comprehensive Loading and Error States:
+  - Implement for dashboard and session list
+  - Consider WebSockets or Server-Sent Events

-   - Skeleton Loaders: Use skeleton loaders for dashboard components to provide a better loading experience.
-   - Global Error Handling: Implement a global error handling strategy to catch and display user-friendly error messages for API failures or other unexpected issues.
+- [ ] **Data export functionality**

-2. Redesign the Dashboard:
+  - Allow users (especially admins) to export session data
+  - Support CSV format initially

-   - Improve Information Hierarchy: Reorganize the dashboard to present the most important information first.
-   - Enhance Visual Appeal: Use the new component library to create a more modern and visually appealing design with a consistent color palette and typography.
-   - Improve Chart Interactivity: Add features like tooltips, zooming, and filtering to the charts to make them more interactive and informative.
+- [ ] **Customizable dashboard**
+  - Allow users to customize dashboard view
+  - Let users choose which metrics/charts are most important

-3. Ensure Full Responsiveness:
-   - Mobile-First Approach: Review and update all pages and components to ensure they are fully responsive and usable on a wide range of devices.
+## Testing & Quality Assurance

-### Phase 3: Advanced Topics (Security, Performance, and Documentation)
+- [ ] **Comprehensive testing suite**

-This phase focuses on long-term improvements to the project's stability, performance, and maintainability.
+  - [ ] Unit tests for utility functions and API logic
+  - [ ] Integration tests for API endpoints with database
+  - [ ] End-to-end tests for user flows (Playwright or Cypress)

-1. Conduct a Security Review:
+- [ ] **Error monitoring and logging**

-   - Input Validation: Ensure that all user inputs are properly validated on both the client and server sides.
-   - Dependency Audit: Regularly audit dependencies for known vulnerabilities.
+  - Integrate robust error monitoring service (Sentry)
+  - Enhance server-side logging

-2. Optimize Performance:
+- [ ] **Accessibility improvements**
+  - Review application against WCAG guidelines
+  - Improve keyboard navigation and screen reader compatibility
+  - Check color contrast ratios

-   - Code Splitting: Leverage Next.js's automatic code splitting to reduce initial load times.
-   - Caching: Implement caching strategies for frequently accessed data to reduce database load and improve API response times.
+## Security Enhancements

-3. Expand Documentation:
-   - API Documentation: Create detailed documentation for all API endpoints.
-   - Component Library: Document the usage and props of all reusable components.
-   - Update `AGENTS.md`: Keep the AGENTS.md file up-to-date with any architectural changes.
+- [x] **Password reset functionality** ✅

+  - Implemented secure password reset mechanism
+  - Files: `app/forgot-password/page.tsx`, `app/reset-password/page.tsx`, `pages/api/forgot-password.ts`, `pages/api/reset-password.ts`

+- [ ] **Two-Factor Authentication (2FA)**

+  - Consider adding 2FA, especially for admin accounts

+- [ ] **Input validation and sanitization**
+  - Review all user inputs (API request bodies, query parameters)
+  - Ensure proper validation and sanitization

+## Code Quality & Development

+- [ ] **Code review process**

+  - Enforce code reviews for all changes

+- [ ] **Environment configuration**

+  - Ensure secure management of environment-specific configurations

+- [ ] **Dependency management**

+  - Periodically review dependencies for vulnerabilities
+  - Keep dependencies updated

+- [ ] **Documentation updates**
+  - [ ] Ensure `docs/dashboard-components.md` reflects actual implementations
+  - [ ] Verify "Dashboard Enhancements" are consistently applied
+  - [ ] Update documentation for improved layout and visual hierarchies
```
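The "Centralize Data Fetching" item in the old plan above is concrete enough to sketch. What follows is a minimal, purely illustrative shape for the proposed `lib/data-service.ts`; the `prisma` export path and the `Session` column names (`companyId`, `startTime`) are assumptions, since the model's fields are not fully shown in this compare:

```typescript
// lib/data-service.ts -- hypothetical sketch of the "Centralize Data Fetching" TODO item.
// Assumes a Prisma client exported from lib/prisma and a Session model with
// companyId/startTime columns (not confirmed by this diff).
import { prisma } from "./prisma";

export interface SessionQuery {
  companyId: string;
  startDate?: Date;
  endDate?: Date;
}

// Single entry point for session reads, so API routes and UI components
// never talk to Prisma directly.
export async function getSessions({ companyId, startDate, endDate }: SessionQuery) {
  return prisma.session.findMany({
    where: {
      companyId,
      // Only constrain the time window when both bounds are supplied.
      ...(startDate && endDate
        ? { startTime: { gte: startDate, lte: endDate } }
        : {}),
    },
    orderBy: { startTime: "desc" },
  });
}
```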
```diff
@@ -4,6 +4,10 @@ import { useState, useEffect } from "react";
 import { useSession } from "next-auth/react";
 import { Company } from "../../../lib/types";

+interface CompanyConfigResponse {
+  company: Company;
+}
+
 export default function CompanySettingsPage() {
   const { data: session, status } = useSession();
   // We store the full company object for future use and updates after save operations
@@ -22,7 +26,7 @@ export default function CompanySettingsPage() {
     setLoading(true);
     try {
       const res = await fetch("/api/dashboard/config");
-      const data = await res.json();
+      const data = (await res.json()) as CompanyConfigResponse;
       setCompany(data.company);
       setCsvUrl(data.company.csvUrl || "");
       setCsvUsername(data.company.csvUsername || "");
@@ -58,10 +62,10 @@ export default function CompanySettingsPage() {
       if (res.ok) {
         setMessage("Settings saved successfully!");
         // Update local state if needed
-        const data = await res.json();
+        const data = (await res.json()) as CompanyConfigResponse;
         setCompany(data.company);
       } else {
-        const error = await res.json();
+        const error = (await res.json()) as { message?: string; };
         setMessage(
           `Failed to save settings: ${error.message || "Unknown error"}`
         );
```
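Every hunk in this commit applies the same `(await res.json()) as T` cast by hand at each call site. A single helper could express the pattern once. This is a sketch, not code from the commit; `fetchJson` is a hypothetical name, though the `{ message?: string }` error shape mirrors what the handlers above expect:

```typescript
// lib/fetch-json.ts -- hypothetical helper generalizing the typed-response
// pattern this commit repeats at each fetch call.
export async function fetchJson<T>(input: RequestInfo, init?: RequestInit): Promise<T> {
  const res = await fetch(input, init);
  if (!res.ok) {
    // Same error shape the pages in this diff expect from failed API calls.
    const error = (await res.json().catch(() => ({}))) as { message?: string };
    throw new Error(error.message || `Request failed: ${res.status}`);
  }
  return (await res.json()) as T;
}

// Usage, matching the settings page above:
// const { company } = await fetchJson<CompanyConfigResponse>("/api/dashboard/config");
```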
```diff
@@ -1,6 +1,6 @@
 "use client";

-import { useEffect, useState, useCallback } from "react";
+import { useEffect, useState } from "react";
 import { signOut, useSession } from "next-auth/react";
 import { useRouter } from "next/navigation";
 import {
@@ -16,8 +16,11 @@ import WordCloud from "../../../components/WordCloud";
 import GeographicMap from "../../../components/GeographicMap";
 import ResponseTimeDistribution from "../../../components/ResponseTimeDistribution";
 import WelcomeBanner from "../../../components/WelcomeBanner";
-import DateRangePicker from "../../../components/DateRangePicker";
-import TopQuestionsChart from "../../../components/TopQuestionsChart";

+interface MetricsApiResponse {
+  metrics: MetricsResult;
+  company: Company;
+}
+
 // Safely wrapped component with useSession
 function DashboardContent() {
@@ -27,56 +30,9 @@ function DashboardContent() {
   const [company, setCompany] = useState<Company | null>(null);
   const [, setLoading] = useState<boolean>(false);
   const [refreshing, setRefreshing] = useState<boolean>(false);
-  const [dateRange, setDateRange] = useState<{
-    minDate: string;
-    maxDate: string;
-  } | null>(null);
-  const [selectedStartDate, setSelectedStartDate] = useState<string>("");
-  const [selectedEndDate, setSelectedEndDate] = useState<string>("");

   const isAuditor = session?.user?.role === "auditor";

-  // Function to fetch metrics with optional date range
-  const fetchMetrics = useCallback(
-    async (startDate?: string, endDate?: string) => {
-      setLoading(true);
-      try {
-        let url = "/api/dashboard/metrics";
-        if (startDate && endDate) {
-          url += `?startDate=${startDate}&endDate=${endDate}`;
-        }
-
-        const res = await fetch(url);
-        const data = await res.json();
-
-        setMetrics(data.metrics);
-        setCompany(data.company);
-
-        // Set date range from API response (only on initial load)
-        if (data.dateRange && !dateRange) {
-          setDateRange(data.dateRange);
-          setSelectedStartDate(data.dateRange.minDate);
-          setSelectedEndDate(data.dateRange.maxDate);
-        }
-      } catch (error) {
-        console.error("Error fetching metrics:", error);
-      } finally {
-        setLoading(false);
-      }
-    },
-    [dateRange]
-  );
-
-  // Handle date range changes
-  const handleDateRangeChange = useCallback(
-    (startDate: string, endDate: string) => {
-      setSelectedStartDate(startDate);
-      setSelectedEndDate(endDate);
-      fetchMetrics(startDate, endDate);
-    },
-    [fetchMetrics]
-  );
-
   useEffect(() => {
     // Redirect if not authenticated
     if (status === "unauthenticated") {
@@ -86,9 +42,23 @@ function DashboardContent() {

     // Fetch metrics and company on mount if authenticated
     if (status === "authenticated") {
-      fetchMetrics();
+      const fetchData = async () => {
+        setLoading(true);
+        const res = await fetch("/api/dashboard/metrics");
+        const data = (await res.json()) as MetricsApiResponse;
+        console.log("Metrics from API:", {
+          avgSessionLength: data.metrics.avgSessionLength,
+          avgSessionTimeTrend: data.metrics.avgSessionTimeTrend,
+          totalSessionDuration: data.metrics.totalSessionDuration,
+          validSessionsForDuration: data.metrics.validSessionsForDuration,
+        });
+        setMetrics(data.metrics);
+        setCompany(data.company);
+        setLoading(false);
+      };
+      fetchData();
     }
-  }, [status, router, fetchMetrics]); // Add fetchMetrics to dependency array
+  }, [status, router]); // Add status and router to dependency array

   async function handleRefresh() {
     if (isAuditor) return; // Prevent auditors from refreshing
@@ -111,10 +81,10 @@ function DashboardContent() {
       if (res.ok) {
         // Refetch metrics
         const metricsRes = await fetch("/api/dashboard/metrics");
-        const data = await metricsRes.json();
+        const data = (await metricsRes.json()) as MetricsApiResponse;
         setMetrics(data.metrics);
       } else {
-        const errorData = await res.json();
+        const errorData = (await res.json()) as { error: string; };
         alert(`Failed to refresh sessions: ${errorData.error}`);
       }
     } finally {
@@ -266,19 +236,7 @@ function DashboardContent() {
           </button>
         </div>
       </div>

-      {/* Date Range Picker */}
-      {dateRange && (
-        <DateRangePicker
-          minDate={dateRange.minDate}
-          maxDate={dateRange.maxDate}
-          onDateRangeChange={handleDateRangeChange}
-          initialStartDate={selectedStartDate}
-          initialEndDate={selectedEndDate}
-        />
-      )}
-
-      <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 xl:grid-cols-7 gap-4">
+      <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4">
         <MetricCard
           title="Total Sessions"
           value={metrics.totalSessions}
@@ -375,70 +333,6 @@ function DashboardContent() {
             isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0, // Lower response time is better
           }}
         />
-        <MetricCard
-          title="Avg. Daily Costs"
-          value={`€${metrics.avgDailyCosts?.toFixed(4) || "0.0000"}`}
-          icon={
-            <svg
-              className="h-5 w-5"
-              xmlns="http://www.w3.org/2000/svg"
-              fill="none"
-              viewBox="0 0 24 24"
-              stroke="currentColor"
-              strokeWidth="1"
-            >
-              <path
-                strokeLinecap="round"
-                strokeLinejoin="round"
-                d="M12 8c-1.657 0-3 .895-3 2s1.343 2 3 2 3 .895 3 2-1.343 2-3 2m0-8c1.11 0 2.08.402 2.599 1M12 8V7m0 1v8m0 0v1m0-1c-1.11 0-2.08-.402-2.599-1M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
-              />
-            </svg>
-          }
-        />
-        <MetricCard
-          title="Peak Usage Time"
-          value={metrics.peakUsageTime || "N/A"}
-          icon={
-            <svg
-              className="h-5 w-5"
-              xmlns="http://www.w3.org/2000/svg"
-              fill="none"
-              viewBox="0 0 24 24"
-              stroke="currentColor"
-              strokeWidth="1"
-            >
-              <path
-                strokeLinecap="round"
-                strokeLinejoin="round"
-                d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z"
-              />
-            </svg>
-          }
-        />
-        <MetricCard
-          title="Resolved Chats"
-          value={`${metrics.resolvedChatsPercentage?.toFixed(1) || "0.0"}%`}
-          icon={
-            <svg
-              className="h-5 w-5"
-              xmlns="http://www.w3.org/2000/svg"
-              fill="none"
-              viewBox="0 0 24 24"
-              stroke="currentColor"
-              strokeWidth="1"
-            >
-              <path
-                strokeLinecap="round"
-                strokeLinejoin="round"
-                d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"
-              />
-            </svg>
-          }
-          trend={{
-            value: metrics.resolvedChatsPercentage ?? 0,
-            isPositive: (metrics.resolvedChatsPercentage ?? 0) >= 80, // 80%+ resolution rate is good
-          }}
-        />
       </div>

       <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
@@ -503,9 +397,6 @@ function DashboardContent() {
         </div>
       </div>

-      {/* Top Questions Chart */}
-      <TopQuestionsChart data={metrics.topQuestions || []} />
-
       <div className="bg-white p-6 rounded-xl shadow">
         <h3 className="font-bold text-lg text-gray-800 mb-4">
           Response Time Distribution
```
```diff
@@ -4,11 +4,14 @@ import { useEffect, useState } from "react";
 import { useParams, useRouter } from "next/navigation"; // Import useRouter
 import { useSession } from "next-auth/react"; // Import useSession
 import SessionDetails from "../../../../components/SessionDetails";

-import MessageViewer from "../../../../components/MessageViewer";
 import TranscriptViewer from "../../../../components/TranscriptViewer";
 import { ChatSession } from "../../../../lib/types";
 import Link from "next/link";

+interface SessionApiResponse {
+  session: ChatSession;
+}
+
 export default function SessionViewPage() {
   const params = useParams();
   const router = useRouter(); // Initialize useRouter
@@ -31,13 +34,13 @@ export default function SessionViewPage() {
       try {
         const response = await fetch(`/api/dashboard/session/${id}`);
         if (!response.ok) {
-          const errorData = await response.json();
+          const errorData = (await response.json()) as { error: string; };
           throw new Error(
             errorData.error ||
               `Failed to fetch session: ${response.statusText}`
           );
         }
-        const data = await response.json();
+        const data = (await response.json()) as SessionApiResponse;
         setSession(data.session);
       } catch (err) {
         setError(
@@ -137,26 +140,31 @@ export default function SessionViewPage() {
       <div>
         <SessionDetails session={session} />
       </div>

-      {/* Show parsed messages if available */}
-      {session.messages && session.messages.length > 0 && (
-        <div>
-          <MessageViewer messages={session.messages} />
+      {session.transcriptContent &&
+      session.transcriptContent.trim() !== "" ? (
+        <div className="mt-0">
+          <TranscriptViewer
+            transcriptContent={session.transcriptContent}
+            transcriptUrl={session.fullTranscriptUrl}
+          />
         </div>
-      )}
-
-      {/* Show transcript URL if available */}
-      {session.fullTranscriptUrl && (
+      ) : (
         <div className="bg-white p-4 rounded-lg shadow">
-          <h3 className="font-bold text-lg mb-3">Source Transcript</h3>
-          <a
-            href={session.fullTranscriptUrl}
-            target="_blank"
-            rel="noopener noreferrer"
-            className="text-sky-600 hover:underline"
-          >
-            View Original Transcript
-          </a>
+          <h3 className="font-bold text-lg mb-3">Transcript</h3>
+          <p className="text-gray-600">
+            No transcript content available for this session.
+          </p>
+          {session.fullTranscriptUrl &&
+            process.env.NODE_ENV !== "production" && (
+              <a
+                href={session.fullTranscriptUrl}
+                target="_blank"
+                rel="noopener noreferrer"
+                className="text-sky-600 hover:underline mt-2 inline-block"
+              >
+                View Source Transcript URL
+              </a>
+            )}
         </div>
       )}
     </div>
```
```diff
@@ -14,6 +14,11 @@ interface FilterOptions {
   languages: string[];
 }

+interface SessionsApiResponse {
+  sessions: ChatSession[];
+  totalSessions: number;
+}
+
 export default function SessionsPage() {
   const [sessions, setSessions] = useState<ChatSession[]>([]);
   const [loading, setLoading] = useState(true);
@@ -58,7 +63,7 @@ export default function SessionsPage() {
       if (!response.ok) {
         throw new Error("Failed to fetch filter options");
       }
-      const data = await response.json();
+      const data = (await response.json()) as FilterOptions;
       setFilterOptions(data);
     } catch (err) {
       setError(
@@ -88,7 +93,7 @@ export default function SessionsPage() {
       if (!response.ok) {
         throw new Error(`Failed to fetch sessions: ${response.statusText}`);
       }
-      const data = await response.json();
+      const data = (await response.json()) as SessionsApiResponse;
       setSessions(data.sessions || []);
       setTotalPages(Math.ceil((data.totalSessions || 0) / pageSize));
     } catch (err) {
```
```diff
@@ -12,6 +12,10 @@ interface UserManagementProps {
   session: UserSession;
 }

+interface UsersApiResponse {
+  users: UserItem[];
+}
+
 export default function UserManagement({ session }: UserManagementProps) {
   const [users, setUsers] = useState<UserItem[]>([]);
   const [email, setEmail] = useState<string>("");
@@ -21,7 +25,7 @@ export default function UserManagement({ session }: UserManagementProps) {
   useEffect(() => {
     fetch("/api/dashboard/users")
       .then((r) => r.json())
-      .then((data) => setUsers(data.users));
+      .then((data) => setUsers((data as UsersApiResponse).users));
   }, []);

   async function inviteUser() {
```
```diff
@@ -9,6 +9,10 @@ interface UserItem {
   role: string;
 }

+interface UsersApiResponse {
+  users: UserItem[];
+}
+
 export default function UserManagementPage() {
   const { data: session, status } = useSession();
   const [users, setUsers] = useState<UserItem[]>([]);
@@ -27,7 +31,7 @@ export default function UserManagementPage() {
     setLoading(true);
     try {
       const res = await fetch("/api/dashboard/users");
-      const data = await res.json();
+      const data = (await res.json()) as UsersApiResponse;
       setUsers(data.users);
     } catch (error) {
       console.error("Failed to fetch users:", error);
@@ -52,7 +56,7 @@ export default function UserManagementPage() {
       // Refresh the user list
       fetchUsers();
     } else {
-      const error = await res.json();
+      const error = (await res.json()) as { message?: string; };
       setMessage(
         `Failed to invite user: ${error.message || "Unknown error"}`
       );
```
app/globals.css (119 lines changed; the theme definitions are removed, leaving the Tailwind import)

```diff
@@ -1,120 +1 @@
 @import "tailwindcss";
-@import "tw-animate-css";
-
-@custom-variant dark (&:is(.dark *));
-
-@theme inline {
-  --radius-sm: calc(var(--radius) - 4px);
-  --radius-md: calc(var(--radius) - 2px);
-  --radius-lg: var(--radius);
-  --radius-xl: calc(var(--radius) + 4px);
-  --color-background: var(--background);
-  --color-foreground: var(--foreground);
-  --color-card: var(--card);
-  --color-card-foreground: var(--card-foreground);
-  --color-popover: var(--popover);
-  --color-popover-foreground: var(--popover-foreground);
-  --color-primary: var(--primary);
-  --color-primary-foreground: var(--primary-foreground);
-  --color-secondary: var(--secondary);
-  --color-secondary-foreground: var(--secondary-foreground);
-  --color-muted: var(--muted);
-  --color-muted-foreground: var(--muted-foreground);
-  --color-accent: var(--accent);
-  --color-accent-foreground: var(--accent-foreground);
-  --color-destructive: var(--destructive);
-  --color-border: var(--border);
-  --color-input: var(--input);
-  --color-ring: var(--ring);
-  --color-chart-1: var(--chart-1);
-  --color-chart-2: var(--chart-2);
-  --color-chart-3: var(--chart-3);
-  --color-chart-4: var(--chart-4);
-  --color-chart-5: var(--chart-5);
-  --color-sidebar: var(--sidebar);
-  --color-sidebar-foreground: var(--sidebar-foreground);
-  --color-sidebar-primary: var(--sidebar-primary);
-  --color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
-  --color-sidebar-accent: var(--sidebar-accent);
-  --color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
-  --color-sidebar-border: var(--sidebar-border);
-  --color-sidebar-ring: var(--sidebar-ring);
-}
-
-:root {
-  --radius: 0.625rem;
-  --background: oklch(1 0 0);
-  --foreground: oklch(0.145 0 0);
-  --card: oklch(1 0 0);
-  --card-foreground: oklch(0.145 0 0);
-  --popover: oklch(1 0 0);
-  --popover-foreground: oklch(0.145 0 0);
-  --primary: oklch(0.205 0 0);
-  --primary-foreground: oklch(0.985 0 0);
-  --secondary: oklch(0.97 0 0);
-  --secondary-foreground: oklch(0.205 0 0);
-  --muted: oklch(0.97 0 0);
-  --muted-foreground: oklch(0.556 0 0);
-  --accent: oklch(0.97 0 0);
-  --accent-foreground: oklch(0.205 0 0);
-  --destructive: oklch(0.577 0.245 27.325);
-  --border: oklch(0.922 0 0);
-  --input: oklch(0.922 0 0);
-  --ring: oklch(0.708 0 0);
-  --chart-1: oklch(0.646 0.222 41.116);
-  --chart-2: oklch(0.6 0.118 184.704);
-  --chart-3: oklch(0.398 0.07 227.392);
-  --chart-4: oklch(0.828 0.189 84.429);
-  --chart-5: oklch(0.769 0.188 70.08);
-  --sidebar: oklch(0.985 0 0);
-  --sidebar-foreground: oklch(0.145 0 0);
-  --sidebar-primary: oklch(0.205 0 0);
-  --sidebar-primary-foreground: oklch(0.985 0 0);
-  --sidebar-accent: oklch(0.97 0 0);
-  --sidebar-accent-foreground: oklch(0.205 0 0);
-  --sidebar-border: oklch(0.922 0 0);
-  --sidebar-ring: oklch(0.708 0 0);
-}
-
-.dark {
-  --background: oklch(0.145 0 0);
-  --foreground: oklch(0.985 0 0);
-  --card: oklch(0.205 0 0);
-  --card-foreground: oklch(0.985 0 0);
-  --popover: oklch(0.205 0 0);
-  --popover-foreground: oklch(0.985 0 0);
-  --primary: oklch(0.922 0 0);
-  --primary-foreground: oklch(0.205 0 0);
-  --secondary: oklch(0.269 0 0);
-  --secondary-foreground: oklch(0.985 0 0);
-  --muted: oklch(0.269 0 0);
-  --muted-foreground: oklch(0.708 0 0);
-  --accent: oklch(0.269 0 0);
-  --accent-foreground: oklch(0.985 0 0);
-  --destructive: oklch(0.704 0.191 22.216);
-  --border: oklch(1 0 0 / 10%);
-  --input: oklch(1 0 0 / 15%);
-  --ring: oklch(0.556 0 0);
-  --chart-1: oklch(0.488 0.243 264.376);
-  --chart-2: oklch(0.696 0.17 162.48);
-  --chart-3: oklch(0.769 0.188 70.08);
-  --chart-4: oklch(0.627 0.265 303.9);
-  --chart-5: oklch(0.645 0.246 16.439);
-  --sidebar: oklch(0.205 0 0);
-  --sidebar-foreground: oklch(0.985 0 0);
-  --sidebar-primary: oklch(0.488 0.243 264.376);
-  --sidebar-primary-foreground: oklch(0.985 0 0);
-  --sidebar-accent: oklch(0.269 0 0);
-  --sidebar-accent-foreground: oklch(0.985 0 0);
-  --sidebar-border: oklch(1 0 0 / 10%);
-  --sidebar-ring: oklch(0.556 0 0);
-}
-
-@layer base {
-  * {
-    @apply border-border outline-ring/50;
-  }
-  body {
-    @apply bg-background text-foreground;
-  }
-}
```
```diff
@@ -1,6 +1,6 @@
 import { getServerSession } from "next-auth";
 import { redirect } from "next/navigation";
-import { authOptions } from "./api/auth/[...nextauth]/route";
+import { authOptions } from "../pages/api/auth/[...nextauth]";

 export default async function HomePage() {
   const session = await getServerSession(authOptions);
```
```diff
@@ -1,21 +0,0 @@
-{
-  "$schema": "https://ui.shadcn.com/schema.json",
-  "style": "new-york",
-  "rsc": true,
-  "tsx": true,
-  "tailwind": {
-    "config": "tailwind.config.js",
-    "css": "app/globals.css",
-    "baseColor": "neutral",
-    "cssVariables": true,
-    "prefix": ""
-  },
-  "aliases": {
-    "components": "@/components",
-    "utils": "@/lib/utils",
-    "ui": "@/components/ui",
-    "lib": "@/lib",
-    "hooks": "@/hooks"
-  },
-  "iconLibrary": "lucide"
-}
```
```diff
@@ -1,155 +0,0 @@
-"use client";
-
-import { useState, useEffect } from "react";
-
-interface DateRangePickerProps {
-  minDate: string;
-  maxDate: string;
-  onDateRangeChange: (startDate: string, endDate: string) => void;
-  initialStartDate?: string;
-  initialEndDate?: string;
-}
-
-export default function DateRangePicker({
-  minDate,
-  maxDate,
-  onDateRangeChange,
-  initialStartDate,
-  initialEndDate,
-}: DateRangePickerProps) {
-  const [startDate, setStartDate] = useState(initialStartDate || minDate);
-  const [endDate, setEndDate] = useState(initialEndDate || maxDate);
-
-  useEffect(() => {
-    // Notify parent component when dates change
-    onDateRangeChange(startDate, endDate);
-  }, [startDate, endDate, onDateRangeChange]);
-
-  const handleStartDateChange = (newStartDate: string) => {
-    // Ensure start date is not before min date
-    if (newStartDate < minDate) {
-      setStartDate(minDate);
-      return;
-    }
-
-    // Ensure start date is not after end date
-    if (newStartDate > endDate) {
-      setEndDate(newStartDate);
-    }
-
-    setStartDate(newStartDate);
-  };
-
-  const handleEndDateChange = (newEndDate: string) => {
-    // Ensure end date is not after max date
-    if (newEndDate > maxDate) {
-      setEndDate(maxDate);
-      return;
-    }
-
-    // Ensure end date is not before start date
-    if (newEndDate < startDate) {
-      setStartDate(newEndDate);
-    }
-
-    setEndDate(newEndDate);
-  };
-
-  const resetToFullRange = () => {
-    setStartDate(minDate);
-    setEndDate(maxDate);
-  };
-
-  const setLast30Days = () => {
-    const thirtyDaysAgo = new Date();
-    thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
-    const thirtyDaysAgoStr = thirtyDaysAgo.toISOString().split("T")[0];
-
-    // Use the later of 30 days ago or minDate
-    const newStartDate =
-      thirtyDaysAgoStr > minDate ? thirtyDaysAgoStr : minDate;
-    setStartDate(newStartDate);
-    setEndDate(maxDate);
-  };
-
-  const setLast7Days = () => {
-    const sevenDaysAgo = new Date();
-    sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
-    const sevenDaysAgoStr = sevenDaysAgo.toISOString().split("T")[0];
-
-    // Use the later of 7 days ago or minDate
-    const newStartDate = sevenDaysAgoStr > minDate ? sevenDaysAgoStr : minDate;
-    setStartDate(newStartDate);
-    setEndDate(maxDate);
-  };
-
-  return (
-    <div className="bg-white p-4 rounded-lg shadow-sm border border-gray-200">
-      <div className="flex flex-col sm:flex-row gap-4 items-start sm:items-center">
-        <div className="flex flex-col sm:flex-row gap-3 items-start sm:items-center">
-          <label className="text-sm font-medium text-gray-700 whitespace-nowrap">
-            Date Range:
-          </label>
-
-          <div className="flex flex-col sm:flex-row gap-2 items-start sm:items-center">
-            <div className="flex items-center gap-2">
-              <label htmlFor="start-date" className="text-sm text-gray-600">
-                From:
-              </label>
-              <input
-                id="start-date"
-                type="date"
-                value={startDate}
-                min={minDate}
-                max={maxDate}
-                onChange={(e) => handleStartDateChange(e.target.value)}
-                className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
-              />
-            </div>
-
-            <div className="flex items-center gap-2">
-              <label htmlFor="end-date" className="text-sm text-gray-600">
-                To:
-              </label>
-              <input
-                id="end-date"
-                type="date"
-                value={endDate}
-                min={minDate}
-                max={maxDate}
-                onChange={(e) => handleEndDateChange(e.target.value)}
-                className="px-3 py-1.5 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-sky-500 focus:border-sky-500"
-              />
-            </div>
-          </div>
-        </div>
-
-        <div className="flex flex-wrap gap-2">
-          <button
-            onClick={setLast7Days}
-            className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
-          >
-            Last 7 days
-          </button>
-          <button
-            onClick={setLast30Days}
-            className="px-3 py-1.5 text-xs font-medium text-sky-600 bg-sky-50 border border-sky-200 rounded-md hover:bg-sky-100 transition-colors"
-          >
-            Last 30 days
-          </button>
-          <button
-            onClick={resetToFullRange}
-            className="px-3 py-1.5 text-xs font-medium text-gray-600 bg-gray-50 border border-gray-200 rounded-md hover:bg-gray-100 transition-colors"
-          >
-            All time
-          </button>
-        </div>
-      </div>
-
-      <div className="mt-2 text-xs text-gray-500">
-        Available data: {new Date(minDate).toLocaleDateString()} -{" "}
-        {new Date(maxDate).toLocaleDateString()}
-      </div>
-    </div>
-  );
-}
```
```diff
@@ -25,30 +25,6 @@ const getCountryCoordinates = (): Record<string, [number, number]> => {
   US: [37.0902, -95.7129],
   GB: [55.3781, -3.436],
   BA: [43.9159, 17.6791],
   NL: [52.1326, 5.2913],
   DE: [51.1657, 10.4515],
   FR: [46.6034, 1.8883],
   IT: [41.8719, 12.5674],
   ES: [40.4637, -3.7492],
   CA: [56.1304, -106.3468],
   PL: [51.9194, 19.1451],
   SE: [60.1282, 18.6435],
   NO: [60.472, 8.4689],
   FI: [61.9241, 25.7482],
   CH: [46.8182, 8.2275],
   AT: [47.5162, 14.5501],
   BE: [50.8503, 4.3517],
   DK: [56.2639, 9.5018],
   CZ: [49.8175, 15.473],
   HU: [47.1625, 19.5033],
   PT: [39.3999, -8.2245],
   GR: [39.0742, 21.8243],
   RO: [45.9432, 24.9668],
   IE: [53.4129, -8.2439],
   BG: [42.7339, 25.4858],
   HR: [45.1, 15.2],
   SK: [48.669, 19.699],
   SI: [46.1512, 14.9955],
 };
 // This function now primarily returns fallbacks.
 // The actual fetching using @rapideditor/country-coder will be in the component's useEffect.
```
```diff
@@ -1,76 +0,0 @@
-"use client";
-
-import { Message } from "../lib/types";
-
-interface MessageViewerProps {
-  messages: Message[];
-}
-
-/**
- * Component to display parsed messages in a chat-like format
- */
-export default function MessageViewer({ messages }: MessageViewerProps) {
-  if (!messages || messages.length === 0) {
-    return (
-      <div className="bg-white p-4 rounded-lg shadow">
-        <h3 className="font-bold text-lg mb-3">Conversation</h3>
-        <p className="text-gray-500 italic">No parsed messages available</p>
-      </div>
-    );
-  }
-
-  return (
-    <div className="bg-white p-4 rounded-lg shadow">
-      <h3 className="font-bold text-lg mb-3">
-        Conversation ({messages.length} messages)
-      </h3>
-
-      <div className="space-y-3 max-h-96 overflow-y-auto">
-        {messages.map((message) => (
-          <div
-            key={message.id}
-            className={`flex ${
-              message.role.toLowerCase() === "user"
-                ? "justify-end"
-                : "justify-start"
-            }`}
-          >
-            <div
-              className={`max-w-xs lg:max-w-md px-4 py-2 rounded-lg ${
-                message.role.toLowerCase() === "user"
-                  ? "bg-blue-500 text-white"
-                  : message.role.toLowerCase() === "assistant"
-                    ? "bg-gray-200 text-gray-800"
-                    : "bg-yellow-100 text-yellow-800"
-              }`}
-            >
-              <div className="flex items-center justify-between mb-1">
-                <span className="text-xs font-medium opacity-75 mr-2">
-                  {message.role}
-                </span>
-                <span className="text-xs opacity-75 ml-2">
-                  {new Date(message.timestamp).toLocaleTimeString()}
-                </span>
-              </div>
-              <div className="text-sm whitespace-pre-wrap">
-                {message.content}
-              </div>
-            </div>
-          </div>
-        ))}
-      </div>
-
-      <div className="mt-4 pt-3 border-t text-sm text-gray-500">
-        <div className="flex justify-between">
-          <span>
-            First message: {new Date(messages[0].timestamp).toLocaleString()}
-          </span>
-          <span>
-            Last message:{" "}
-            {new Date(messages[messages.length - 1].timestamp).toLocaleString()}
-          </span>
-        </div>
-      </div>
-    </div>
-  );
-}
```
```diff
@@ -15,10 +15,11 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
   return (
     <div className="bg-white p-4 rounded-lg shadow">
       <h3 className="font-bold text-lg mb-3">Session Details</h3>
-      <div className="space-y-3">
+      <div className="space-y-2">
         <div className="flex justify-between border-b pb-2">
           <span className="text-gray-600">Session ID:</span>
-          <span className="font-medium font-mono text-sm">{session.id}</span>
+          <span className="font-medium">{session.sessionId || session.id}</span>
         </div>

         <div className="flex justify-between border-b pb-2">
@@ -70,7 +71,7 @@ export default function SessionDetails({ session }: SessionDetailsProps) {

         {session.sentiment !== null && session.sentiment !== undefined && (
           <div className="flex justify-between border-b pb-2">
-            <span className="text-gray-600">Sentiment Score:</span>
+            <span className="text-gray-600">Sentiment:</span>
             <span
               className={`font-medium ${
                 session.sentiment > 0.3
@@ -90,23 +91,6 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
           </div>
         )}

-        {session.sentimentCategory && (
-          <div className="flex justify-between border-b pb-2">
-            <span className="text-gray-600">AI Sentiment:</span>
-            <span
-              className={`font-medium capitalize ${
-                session.sentimentCategory === "positive"
-                  ? "text-green-500"
-                  : session.sentimentCategory === "negative"
-                    ? "text-red-500"
-                    : "text-orange-500"
-              }`}
-            >
-              {session.sentimentCategory}
-            </span>
-          </div>
-        )}
-
         <div className="flex justify-between border-b pb-2">
           <span className="text-gray-600">Messages Sent:</span>
           <span className="font-medium">{session.messagesSent || 0}</span>
@@ -158,82 +142,24 @@ export default function SessionDetails({ session }: SessionDetailsProps) {
           </div>
         )}

         {session.ipAddress && (
           <div className="flex justify-between border-b pb-2">
             <span className="text-gray-600">IP Address:</span>
             <span className="font-medium font-mono text-sm">
               {session.ipAddress}
             </span>
           </div>
         )}

         {session.processed !== null && session.processed !== undefined && (
           <div className="flex justify-between border-b pb-2">
             <span className="text-gray-600">AI Processed:</span>
             <span
               className={`font-medium ${session.processed ? "text-green-500" : "text-gray-500"}`}
             >
               {session.processed ? "Yes" : "No"}
             </span>
           </div>
         )}

         {session.initialMsg && (
           <div className="border-b pb-2">
             <span className="text-gray-600 block mb-1">Initial Message:</span>
             <div className="bg-gray-50 p-2 rounded text-sm italic">
               "{session.initialMsg}"
         {/* Transcript rendering is now handled by the parent page (app/dashboard/sessions/[id]/page.tsx) */}
         {/* Fallback to link only if we only have the URL but no content - this might also be redundant if parent handles all transcript display */}
         {(!session.transcriptContent ||
           session.transcriptContent.length === 0) &&
           session.fullTranscriptUrl &&
           process.env.NODE_ENV !== "production" && (
             <div className="flex justify-between pt-2">
               <span className="text-gray-600">Transcript:</span>
               <a
                 href={session.fullTranscriptUrl}
                 target="_blank"
                 rel="noopener noreferrer"
                 className="text-blue-500 hover:text-blue-700 underline"
               >
                 View Full Transcript
               </a>
             </div>
             </div>
         )}

         {session.summary && (
           <div className="border-b pb-2">
             <span className="text-gray-600 block mb-1">AI Summary:</span>
             <div className="bg-blue-50 p-2 rounded text-sm">
               {session.summary}
             </div>
           </div>
         )}

         {session.questions && (
           <div className="border-b pb-2">
             <span className="text-gray-600 block mb-1">Questions Asked:</span>
             <div className="bg-yellow-50 p-2 rounded text-sm">
               {(() => {
                 try {
                   const questions = JSON.parse(session.questions);
                   if (Array.isArray(questions) && questions.length > 0) {
                     return (
                       <ul className="list-disc list-inside space-y-1">
                         {questions.map((question: string, index: number) => (
                           <li key={index}>{question}</li>
                         ))}
                       </ul>
                     );
                   }
                   return "No questions identified";
                 } catch {
                   return session.questions;
                 }
               })()}
             </div>
           </div>
         )}

         {session.fullTranscriptUrl && (
           <div className="flex justify-between pt-2">
             <span className="text-gray-600">Transcript:</span>
             <a
               href={session.fullTranscriptUrl}
               target="_blank"
               rel="noopener noreferrer"
               className="text-blue-500 hover:text-blue-700 underline"
             >
               View Full Transcript
             </a>
           </div>
         )}
         )}
       </div>
     </div>
   );
```
```diff
@@ -1,78 +0,0 @@
-"use client";
-
-import React from "react";
-import { TopQuestion } from "../lib/types";
-
-interface TopQuestionsChartProps {
-  data: TopQuestion[];
-  title?: string;
-}
-
-export default function TopQuestionsChart({
-  data,
-  title = "Top 5 Asked Questions",
-}: TopQuestionsChartProps) {
-  if (!data || data.length === 0) {
-    return (
-      <div className="bg-white p-6 rounded-lg shadow-sm border border-gray-200">
-        <h3 className="text-lg font-semibold text-gray-900 mb-4">{title}</h3>
-        <div className="text-center py-8 text-gray-500">
-          No questions data available
-        </div>
-      </div>
-    );
-  }
-
-  // Find the maximum count to calculate relative bar widths
-  const maxCount = Math.max(...data.map((q) => q.count));
-
-  return (
-    <div className="bg-white p-6 rounded-lg shadow-sm border border-gray-200">
-      <h3 className="text-lg font-semibold text-gray-900 mb-4">{title}</h3>
-
-      <div className="space-y-4">
-        {data.map((question, index) => {
-          const percentage =
-            maxCount > 0 ? (question.count / maxCount) * 100 : 0;
-
-          return (
-            <div key={index} className="relative">
-              {/* Question text */}
-              <div className="flex justify-between items-start mb-2">
-                <p className="text-sm text-gray-700 font-medium leading-tight pr-4 flex-1">
-                  {question.question}
-                </p>
-                <span className="text-sm font-semibold text-gray-900 bg-gray-100 px-2 py-1 rounded-md whitespace-nowrap">
-                  {question.count}
-                </span>
-              </div>
-
-              {/* Progress bar */}
-              <div className="w-full bg-gray-200 rounded-full h-2">
-                <div
-                  className="bg-blue-600 h-2 rounded-full transition-all duration-300 ease-in-out"
-                  style={{ width: `${percentage}%` }}
-                />
-              </div>
-
-              {/* Rank indicator */}
-              <div className="absolute -left-2 top-0 w-6 h-6 bg-blue-600 text-white text-xs font-bold rounded-full flex items-center justify-center">
-                {index + 1}
-              </div>
-            </div>
-          );
-        })}
-      </div>
-
-      {/* Summary */}
-      <div className="mt-6 pt-4 border-t border-gray-200">
-        <div className="flex justify-between text-sm text-gray-600">
-          <span>Total questions analyzed</span>
-          <span className="font-medium">
-            {data.reduce((sum, q) => sum + q.count, 0)}
-          </span>
-        </div>
-      </div>
-    </div>
-  );
-}
```
```diff
@@ -2,7 +2,7 @@

 import { useState } from "react";
 import ReactMarkdown from "react-markdown";
-import rehypeRaw from "rehype-raw"; // Import rehype-raw
+import rehypeRaw from "rehype-raw";

 interface TranscriptViewerProps {
   transcriptContent: string;
@@ -23,6 +23,7 @@ function formatTranscript(content: string): React.ReactNode[] {
   const elements: React.ReactNode[] = [];
   let currentSpeaker: string | null = null;
   let currentMessages: string[] = [];
+  let currentTimestamp: string | null = null;

   // Process each line
   lines.forEach((line) => {
@@ -32,8 +33,15 @@ function formatTranscript(content: string): React.ReactNode[] {
       return;
     }

-    // Check if this is a new speaker line
-    if (line.startsWith("User:") || line.startsWith("Assistant:")) {
+    // Check if this is a new speaker line with or without datetime
+    // Format 1: [29.05.2025 21:26:44] User: message
+    // Format 2: User: message
+    const datetimeMatch = line.match(
+      /^\[([^\]]+)\]\s*(User|Assistant):\s*(.*)$/
+    );
+    const simpleMatch = line.match(/^(User|Assistant):\s*(.*)$/);
+
+    if (datetimeMatch || simpleMatch) {
       // If we have accumulated messages for a previous speaker, add them
       if (currentSpeaker && currentMessages.length > 0) {
         elements.push(
@@ -48,6 +56,11 @@ function formatTranscript(content: string): React.ReactNode[] {
                 : "bg-gray-100 text-gray-800"
             }`}
           >
+            {currentTimestamp && (
+              <div className="text-xs opacity-60 mb-1">
+                {currentTimestamp}
+              </div>
+            )}
             {currentMessages.map((msg, i) => (
               // Use ReactMarkdown to render each message part
               <ReactMarkdown
@@ -73,12 +86,22 @@ function formatTranscript(content: string): React.ReactNode[] {
         currentMessages = [];
       }

-      // Set the new current speaker
-      currentSpeaker = line.startsWith("User:") ? "User" : "Assistant";
-      // Add the content after "User:" or "Assistant:"
-      const messageContent = line.substring(line.indexOf(":") + 1).trim();
-      if (messageContent) {
-        currentMessages.push(messageContent);
+      if (datetimeMatch) {
+        // Format with datetime: [29.05.2025 21:26:44] User: message
+        currentTimestamp = datetimeMatch[1];
+        currentSpeaker = datetimeMatch[2];
+        const messageContent = datetimeMatch[3].trim();
+        if (messageContent) {
+          currentMessages.push(messageContent);
+        }
+      } else if (simpleMatch) {
+        // Format without datetime: User: message
+        currentTimestamp = null;
+        currentSpeaker = simpleMatch[1];
+        const messageContent = simpleMatch[2].trim();
+        if (messageContent) {
+          currentMessages.push(messageContent);
+        }
       }
     } else if (currentSpeaker) {
       // This is a continuation of the current speaker's message
@@ -100,6 +123,9 @@ function formatTranscript(content: string): React.ReactNode[] {
             : "bg-gray-100 text-gray-800"
         }`}
       >
+        {currentTimestamp && (
+          <div className="text-xs opacity-60 mb-1">{currentTimestamp}</div>
+        )}
         {currentMessages.map((msg, i) => (
           // Use ReactMarkdown to render each message part
           <ReactMarkdown
@@ -138,6 +164,9 @@ export default function TranscriptViewer({

   const formattedElements = formatTranscript(transcriptContent);

+  // Hide "View Full Raw" button in production environment
+  const isProduction = process.env.NODE_ENV === "production";
+
   return (
     <div className="bg-white shadow-lg rounded-lg p-4 md:p-6 mt-6">
       <div className="flex justify-between items-center mb-4">
@@ -145,7 +174,7 @@ export default function TranscriptViewer({
           Session Transcript
         </h2>
         <div className="flex items-center space-x-3">
-          {transcriptUrl && (
+          {transcriptUrl && !isProduction && (
            <a
              href={transcriptUrl}
              target="_blank"
```
227 docs/D1_CLI_ACCESS.md (new file)
@@ -0,0 +1,227 @@

# D1 Database Command Line Access

This guide shows you how to access and manage your Cloudflare D1 database `d1-notso-livedash` from the command line.

## Quick Reference

### Using the Custom D1 CLI Script

```bash
# Simple and fast commands
pnpm d1 tables                            # List all tables
pnpm d1 info                              # Database information
pnpm d1 schema User                       # Show table schema
pnpm d1 query "SELECT COUNT(*) FROM User" # Execute SQL
pnpm d1 export backup.sql                 # Export database

# Remote (production) commands
pnpm d1 --remote info # Production database info
pnpm d1 --remote query "SELECT * FROM Company LIMIT 5"
```

### Using Package.json Scripts

```bash
# Database information
pnpm d1:list        # List all D1 databases
pnpm d1:info        # Local database info
pnpm d1:info:remote # Remote database info

# Backup and export
pnpm d1:export        # Export local database
pnpm d1:export:remote # Export remote database
pnpm d1:schema        # Export schema only
```

### Direct Wrangler Commands

```bash
# Basic operations
npx wrangler d1 list
npx wrangler d1 info d1-notso-livedash
npx wrangler d1 execute d1-notso-livedash --command "SELECT * FROM User"

# Remote operations (add --remote flag)
npx wrangler d1 info d1-notso-livedash --remote
npx wrangler d1 execute d1-notso-livedash --remote --command "SELECT COUNT(*) FROM Company"
```

## Database Schema

Your D1 database contains these tables:

### Company Table

```sql
- id (TEXT, PRIMARY KEY)
- name (TEXT, NOT NULL)
- csvUrl (TEXT, NOT NULL)
- csvUsername (TEXT)
- csvPassword (TEXT)
- sentimentAlert (REAL)
- dashboardOpts (TEXT)
- createdAt (DATETIME, NOT NULL, DEFAULT CURRENT_TIMESTAMP)
- updatedAt (DATETIME, NOT NULL)
```

### User Table

```sql
- id (TEXT, PRIMARY KEY)
- email (TEXT, NOT NULL)
- password (TEXT, NOT NULL)
- companyId (TEXT, NOT NULL)
- role (TEXT, NOT NULL)
- resetToken (TEXT)
- resetTokenExpiry (DATETIME)
```

### Session Table

```sql
- id (TEXT, PRIMARY KEY)
- userId (TEXT, NOT NULL)
- expiresAt (DATETIME, NOT NULL)
```

## Common SQL Queries

### Data Exploration

```sql
-- Check table sizes
SELECT 'Company' as table_name, COUNT(*) as count FROM Company
UNION ALL
SELECT 'User' as table_name, COUNT(*) as count FROM User
UNION ALL
SELECT 'Session' as table_name, COUNT(*) as count FROM Session;

-- Show all table names
SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;

-- Get table schema
PRAGMA table_info(User);
```

### Business Queries

```sql
-- List companies with user counts
SELECT c.name, c.id, COUNT(u.id) as user_count
FROM Company c
LEFT JOIN User u ON c.id = u.companyId
GROUP BY c.id, c.name;

-- Find admin users
SELECT u.email, c.name as company
FROM User u
JOIN Company c ON u.companyId = c.id
WHERE u.role = 'admin';

-- Active sessions
SELECT COUNT(*) as active_sessions
FROM Session
WHERE expiresAt > datetime('now');
```

## Local vs Remote Databases

- **Local Database**: Located at `.wrangler/state/v3/d1/` (for development)
- **Remote Database**: Cloudflare's production D1 database

### When to Use Each

- **Local**: Development, testing, safe experimentation
- **Remote**: Production data, deployment verification

## Database Statistics

Current database info:

- **Database ID**: d4ee7efe-d37a-48e4-bed7-fdfaa5108131
- **Region**: WEUR (Western Europe)
- **Size**: ~53.2 kB
- **Tables**: 6 (including system tables)
- **Read Queries (24h)**: 65
- **Write Queries (24h)**: 8

## Scripts Available

### `/scripts/d1.js` (Recommended)

Simple, fast CLI for common operations:

```bash
node scripts/d1.js tables
node scripts/d1.js schema User
node scripts/d1.js query "SELECT * FROM Company"
node scripts/d1.js --remote info
```

### `/scripts/d1-query.js`

Simple query executor:

```bash
node scripts/d1-query.js "SELECT COUNT(*) FROM User"
node scripts/d1-query.js --remote "SELECT * FROM Company"
```

### `/scripts/d1-manager.js`

Comprehensive database management (if needed for advanced operations):

```bash
node scripts/d1-manager.js info
node scripts/d1-manager.js backup
```

## Backup and Recovery

### Create Backups

```bash
# Quick backup
pnpm d1 export backup_$(date +%Y%m%d).sql

# Automated backup with timestamp
npx wrangler d1 export d1-notso-livedash --output backups/backup_$(date +%Y%m%d_%H%M%S).sql

# Schema only backup
npx wrangler d1 export d1-notso-livedash --no-data --output schema.sql
```

### Restore from Backup

```bash
# Apply SQL file to database
npx wrangler d1 execute d1-notso-livedash --file backup.sql
```

## Troubleshooting

### Common Issues

1. **"wrangler not found"**: Use `npx wrangler` instead of `wrangler`
2. **Permission denied**: Ensure you're logged into Cloudflare: `npx wrangler login`
3. **Database not found**: Check `wrangler.json` for the correct binding name

### Debug Commands

```bash
# Check Wrangler authentication
npx wrangler whoami

# Verify database configuration
npx wrangler d1 list

# Test database connectivity
npx wrangler d1 execute d1-notso-livedash --command "SELECT 1"
```

## Security Notes

- Local database is for development only
- Never expose production database credentials
- Use the `--remote` flag carefully in production
- Regular backups are recommended for production data

@@ -1,213 +0,0 @@
# 🤖 Automated Processing System Documentation

## 🎯 Overview

The LiveDash system now features a complete automated processing pipeline that:

- ✅ **Processes ALL unprocessed sessions** in batches until completion
- ✅ **Runs hourly** to check for new unprocessed sessions
- ✅ **Triggers automatically** when dashboard refresh is pressed
- ✅ **Validates data quality** and filters out low-quality sessions
- ✅ **Requires zero manual intervention** for ongoing operations

---

## 🔄 Complete Workflow

### 1. **CSV Import** (Automatic/Manual)

```
📥 CSV Data → Session Records (processed: false)
```

- **Automatic**: Hourly scheduler imports new CSV data
- **Manual**: Dashboard refresh button triggers immediate import
- **Result**: New sessions created with `processed: false`

### 2. **Transcript Fetching** (As Needed)

```
🔗 fullTranscriptUrl → Message Records
```

- **Script**: `node scripts/fetch-and-parse-transcripts.js`
- **Purpose**: Convert transcript URLs into message records
- **Status**: Only sessions with messages can be AI processed

### 3. **AI Processing** (Automatic/Manual)

```
💬 Messages → 🤖 OpenAI Analysis → 📊 Structured Data
```

- **Automatic**: Hourly scheduler processes all unprocessed sessions
- **Manual**: Dashboard refresh or direct script execution
- **Batch Processing**: Processes ALL unprocessed sessions until none remain
- **Quality Validation**: Filters out empty questions and short summaries

---

## 🚀 Automated Triggers

### **Hourly Scheduler**

```javascript
// Runs every hour automatically
cron.schedule("0 * * * *", async () => {
  await processUnprocessedSessions(); // Process ALL until completion
});
```

### **Dashboard Refresh**

```javascript
// When user clicks refresh in dashboard
POST /api/admin/refresh-sessions
  → Import new CSV data
  → Automatically trigger processUnprocessedSessions()
```

### **Manual Processing**

```bash
# Process all unprocessed sessions until completion
npx tsx scripts/trigger-processing-direct.js

# Check system status
node scripts/check-database-status.js

# Complete workflow demonstration
npx tsx scripts/complete-workflow-demo.js
```

---

## 📊 Processing Logic

### **Batch Processing Algorithm**

```javascript
while (true) {
  // Get next batch of unprocessed sessions with messages
  const sessions = await findUnprocessedSessions({ batchSize: 10 });

  if (sessions.length === 0) {
    console.log("✅ All sessions processed!");
    break;
  }

  // Process batch with concurrency limit
  await processInParallel(sessions, { maxConcurrency: 3 });

  // Small delay (1 second) between batches
  await delay(1000);
}
```

### **Quality Validation**

```javascript
// Check data quality after AI processing
const hasValidQuestions = questions.length > 0;
const hasValidSummary = summary.length >= 10;
const isValidData = hasValidQuestions && hasValidSummary;

if (!isValidData) {
  console.log("⚠️ Session marked as invalid data");
}
```

---

## 🎯 System Behavior

### **What Gets Processed**

- ✅ Sessions with `processed: false`
- ✅ Sessions that have message records
- ❌ Sessions without messages (skipped until transcripts fetched)
- ❌ Already processed sessions (ignored)
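
The two ✅ criteria above correspond to a Prisma lookup along these lines (a minimal sketch, assuming the shared `prisma` client used elsewhere in the codebase):

```javascript
// Sketch: fetch the next batch of sessions that are eligible for AI processing
const batch = await prisma.session.findMany({
  where: {
    processed: false,       // not yet analyzed
    messages: { some: {} }, // must already have parsed message records
  },
  take: 10, // default batch size
});
```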

### **Processing Results**

- **Valid Sessions**: Full AI analysis with categories, questions, summary
- **Invalid Sessions**: Marked as processed but flagged as low-quality
- **Failed Sessions**: Error logged, remains unprocessed for retry

### **Dashboard Integration**

- **Refresh Button**: Imports CSV + triggers processing automatically
- **Real-time Updates**: Processing happens in background
- **Quality Filtering**: Only meaningful conversations shown in analytics

---

## 📈 Current System Status

```
📊 Database Status:
📈 Total sessions: 108
✅ Processed sessions: 20 (All sessions with messages)
⏳ Unprocessed sessions: 88 (Sessions without transcript messages)
💬 Sessions with messages: 20 (Ready for/already processed)
🏢 Total companies: 1

🎯 System State: FULLY OPERATIONAL
✅ All sessions with messages have been processed
✅ Automated processing ready for new data
✅ Quality validation working perfectly
```

---

## 🛠️ Available Scripts

### **Core Processing**

```bash
# Process all unprocessed sessions (complete batch processing)
npx tsx scripts/trigger-processing-direct.js

# Check database status
node scripts/check-database-status.js

# Fetch missing transcripts
node scripts/fetch-and-parse-transcripts.js
```

### **Data Management**

```bash
# Import fresh CSV data
node scripts/trigger-csv-refresh.js

# Reset all sessions to unprocessed (for reprocessing)
node scripts/reset-processed-status.js
```

### **System Demonstration**

```bash
# Complete workflow demonstration
npx tsx scripts/complete-workflow-demo.js
```

---

## 🎉 Key Achievements

### **✅ Complete Automation**

- **Zero manual intervention** needed for ongoing operations
- **Hourly processing** of any new unprocessed sessions
- **Dashboard integration** with automatic processing triggers

### **✅ Batch Processing**

- **Processes ALL unprocessed sessions** until none remain
- **Configurable batch sizes** and concurrency limits
- **Progress tracking** with detailed logging

### **✅ Quality Validation**

- **Automatic filtering** of low-quality sessions
- **Enhanced OpenAI prompts** with crystal-clear instructions
- **Data quality checks** before and after processing

### **✅ Production Ready**

- **Error handling** and retry logic
- **Background processing** without blocking responses
- **Comprehensive logging** for monitoring and debugging

---

## 🚀 Production Deployment

The system is now **100% ready for production** with:

1. **Automated CSV import** every hour
2. **Automated AI processing** every hour
3. **Dashboard refresh integration** for immediate processing
4. **Quality validation** to ensure clean analytics
5. **Complete batch processing** until all sessions are analyzed

**No manual intervention required** - the system will automatically process all new data as it arrives!

@@ -1,79 +0,0 @@

# Scheduler Error Fixes

## Issues Identified and Resolved

### 1. Invalid Company Configuration

**Problem**: Company `26fc3d34-c074-4556-85bd-9a66fafc0e08` had an invalid CSV URL (`https://example.com/data.csv`) with no authentication credentials.

**Solution**:

- Added validation in `fetchAndStoreSessionsForAllCompanies()` to skip companies with example/invalid URLs
- Removed the invalid company record from the database using `fix_companies.js`

### 2. Transcript Fetching Errors

**Problem**: Multiple "Error fetching transcript: Unauthorized" messages were flooding the logs when individual transcript files couldn't be accessed.

**Solution**:

- Improved error handling in the `fetchTranscriptContent()` function
- Added probabilistic logging (only ~10% of errors logged) to prevent log spam
- Added a timeout (10 seconds) for transcript fetching
- Made transcript fetching failures non-blocking (sessions are still created without transcript content)
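
A sketch of what that hardened fetch looks like (illustrative only; the real implementation lives in `lib/csvFetcher.js`):

```javascript
async function fetchTranscriptContent(url, headers) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 10_000); // 10 s timeout
  try {
    const res = await fetch(url, { headers, signal: controller.signal });
    if (!res.ok) throw new Error(res.statusText);
    return await res.text();
  } catch (error) {
    // Probabilistic logging: only ~10% of failures reach the logs
    if (Math.random() < 0.1) {
      console.error(`Error fetching transcript: ${error.message}`);
    }
    return null; // non-blocking: the session is still created without a transcript
  } finally {
    clearTimeout(timer);
  }
}
```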

### 3. CSV Fetching Errors

**Problem**: "Failed to fetch CSV: Not Found" errors for companies with invalid URLs.

**Solution**:

- Added URL validation to skip companies with `example.com` URLs
- Improved error logging to be more descriptive

## Current Status

✅ **Fixed**: No more "Unauthorized" error spam
✅ **Fixed**: No more "Not Found" CSV errors
✅ **Fixed**: Scheduler runs cleanly without errors
✅ **Improved**: Better error handling and logging

## Remaining Companies

After cleanup, only valid companies remain:

- **Demo Company** (`790b9233-d369-451f-b92c-f4dceb42b649`)
  - CSV URL: `https://proto.notso.ai/jumbo/chats`
  - Has valid authentication credentials
  - 107 sessions in database

## Files Modified

1. **lib/csvFetcher.js**
   - Added company URL validation
   - Improved transcript fetching error handling
   - Reduced error log verbosity

2. **fix_companies.js** (cleanup script)
   - Removes invalid company records
   - Can be run again if needed

## Monitoring

The scheduler now runs cleanly every 15 minutes. To monitor:

```bash
# Check scheduler logs
node debug_db.js

# Test manual refresh
node -e "import('./lib/csvFetcher.js').then(m => m.fetchAndStoreSessionsForAllCompanies())"
```

## Future Improvements

1. Add health check endpoint for scheduler status
2. Add metrics for successful/failed fetches
3. Consider retry logic for temporary failures
4. Add alerting for persistent failures

@@ -1,185 +0,0 @@

# Scheduler Workflow Documentation

## Overview

The LiveDash system has two main schedulers that work together to fetch and process session data:

1. **Session Refresh Scheduler** - Fetches new sessions from CSV files
2. **Processing Scheduler** - Processes session transcripts with AI

## Current Status (as of latest check)

- **Total sessions**: 107
- **Processed sessions**: 0
- **Sessions with transcript**: 0
- **Ready for processing**: 0

## How the `processed` Field Works

The ProcessingScheduler picks up sessions where `processed` is **NOT** `true`, which includes:

- `processed = false`
- `processed = null`

**Query used:**

```javascript
{ processed: { not: true } } // Either false or null
```
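
In context, the scheduler's lookup amounts to something like this (a sketch, assuming the shared Prisma client; the batch size is documented under Scheduler Configuration below):

```javascript
const pending = await prisma.session.findMany({
  where: {
    processed: { not: true },         // false or null
    transcriptContent: { not: null }, // must have a transcript to analyze
  },
  take: 10, // batch size per run
});
```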

## Complete Workflow

### Step 1: Session Refresh (CSV Fetching)

**What it does:**

- Fetches session data from company CSV URLs
- Creates session records in database with basic metadata
- Sets `transcriptContent = null` initially
- Sets `processed = null` initially

**Runs:** Every 30 minutes (cron: `*/30 * * * *`)

### Step 2: Transcript Fetching

**What it does:**

- Downloads full transcript content for sessions
- Updates `transcriptContent` field with actual conversation data
- Sessions remain `processed = null` until AI processing

**Runs:** As part of session refresh process

### Step 3: AI Processing

**What it does:**

- Finds sessions with transcript content where `processed != true`
- Sends transcripts to OpenAI for analysis
- Extracts: sentiment, category, questions, summary, etc.
- Updates session with processed data
- Sets `processed = true`

**Runs:** Every hour (cron: `0 * * * *`)
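
Both schedules reduce to two `node-cron` registrations, roughly like this (a sketch; the real registrations live in `lib/scheduler.js` and `lib/processingScheduler.js`):

```javascript
import cron from "node-cron";

// Step 1: fetch new sessions from CSV every 30 minutes
cron.schedule("*/30 * * * *", () => fetchAndStoreSessionsForAllCompanies());

// Step 3: AI-process pending sessions every hour
cron.schedule("0 * * * *", () => processUnprocessedSessions());
```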

## Manual Trigger Commands

### Check Current Status

```bash
node scripts/manual-triggers.js status
```

### Trigger Session Refresh (Fetch new sessions from CSV)

```bash
node scripts/manual-triggers.js refresh
```

### Trigger AI Processing (Process unprocessed sessions)

```bash
node scripts/manual-triggers.js process
```

### Run Both Schedulers

```bash
node scripts/manual-triggers.js both
```

## Troubleshooting

### No Sessions Being Processed?

1. **Check if sessions have transcripts:**

   ```bash
   node scripts/manual-triggers.js status
   ```

2. **If "Sessions with transcript" is 0:**
   - Sessions exist but transcripts haven't been fetched yet
   - Run session refresh: `node scripts/manual-triggers.js refresh`

3. **If "Ready for processing" is 0 but "Sessions with transcript" > 0:**
   - All sessions with transcripts have already been processed
   - Check if `OPENAI_API_KEY` is set in environment

### Common Issues

#### "No sessions found requiring processing"

- All sessions with transcripts have been processed (`processed = true`)
- Or no sessions have transcript content yet

#### "OPENAI_API_KEY environment variable is not set"

- Add OpenAI API key to `.env.development` file
- Restart the application

#### "Error fetching transcript: Unauthorized"

- CSV credentials are incorrect or expired
- Check company CSV username/password in database

## Database Field Mapping

### Before AI Processing

```javascript
{
  id: "session-uuid",
  transcriptContent: "full conversation text" | null,
  processed: null,
  sentimentCategory: null,
  questions: null,
  summary: null,
  // ... other fields
}
```

### After AI Processing

```javascript
{
  id: "session-uuid",
  transcriptContent: "full conversation text",
  processed: true,
  sentimentCategory: "positive" | "neutral" | "negative",
  questions: '["question 1", "question 2"]', // JSON string
  summary: "Brief conversation summary",
  language: "en", // ISO 639-1 code
  messagesSent: 5,
  sentiment: 0.8, // Float value (-1 to 1)
  escalated: false,
  forwardedHr: false,
  category: "Schedule & Hours",
  // ... other fields
}
```

## Scheduler Configuration

### Session Refresh Scheduler

- **File**: `lib/scheduler.js`
- **Frequency**: Every 30 minutes
- **Cron**: `*/30 * * * *`

### Processing Scheduler

- **File**: `lib/processingScheduler.js`
- **Frequency**: Every hour
- **Cron**: `0 * * * *`
- **Batch size**: 10 sessions per run

## Environment Variables Required

```bash
# Database
DATABASE_URL="postgresql://..."

# OpenAI (for processing)
OPENAI_API_KEY="sk-..."

# NextAuth
NEXTAUTH_SECRET="..."
NEXTAUTH_URL="http://localhost:3000"
```

## Next Steps for Testing

1. **Trigger session refresh** to fetch transcripts:

   ```bash
   node scripts/manual-triggers.js refresh
   ```

2. **Check status** to see if transcripts were fetched:

   ```bash
   node scripts/manual-triggers.js status
   ```

3. **Trigger processing** if transcripts are available:

   ```bash
   node scripts/manual-triggers.js process
   ```

4. **View results** in the dashboard session details pages

@@ -1,86 +0,0 @@

# Session Processing with OpenAI

This document explains how the session processing system works in LiveDash-Node.

## Overview

The system now includes an automated process for analyzing chat session transcripts using OpenAI's API. This process:

1. Fetches session data from CSV sources
2. Only adds new sessions that don't already exist in the database
3. Processes session transcripts with OpenAI to extract valuable insights
4. Updates the database with the processed information

## How It Works

### Session Fetching

- The system fetches session data from configured CSV URLs for each company
- Unlike the previous implementation, it now only adds sessions that don't already exist in the database
- This prevents duplicate sessions and allows for incremental updates
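
In Prisma terms, the incremental import is roughly the following (a sketch; `csvRows` stands in for the parsed CSV records and is hypothetical):

```javascript
// Sketch: insert only sessions whose id is not already in the database
const existing = new Set(
  (await prisma.session.findMany({ select: { id: true } })).map((s) => s.id)
);
for (const row of csvRows) {
  if (!existing.has(row.id)) {
    await prisma.session.create({ data: row });
  }
}
```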

### Transcript Processing

- For sessions with transcript content that haven't been processed yet, the system calls OpenAI's API
- The API analyzes the transcript and extracts the following information:
  - Primary language used (ISO 639-1 code)
  - Number of messages sent by the user
  - Overall sentiment (positive, neutral, negative)
  - Whether the conversation was escalated
  - Whether HR contact was mentioned or provided
  - Best-fitting category for the conversation
  - Up to 5 paraphrased questions asked by the user
  - A brief summary of the conversation
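
A minimal sketch of such a call (the actual prompt and model choice live in `lib/processingScheduler.ts`; the model name below is an assumption, not the configured value):

```javascript
import OpenAI from "openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // assumed for illustration
  response_format: { type: "json_object" },
  messages: [
    {
      role: "system",
      content:
        "Analyze the chat transcript and return JSON with: language (ISO 639-1), " +
        "messagesSent, sentiment, escalated, forwardedHr, category, " +
        "questions (up to 5, paraphrased), and summary.",
    },
    { role: "user", content: transcriptContent }, // the session's raw transcript
  ],
});
const analysis = JSON.parse(completion.choices[0].message.content);
```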

### Scheduling

The system includes two schedulers:

1. **Session Refresh Scheduler**: Runs every 15 minutes to fetch new sessions from CSV sources
2. **Session Processing Scheduler**: Runs every hour to process unprocessed sessions with OpenAI

## Database Schema

The Session model has been updated with new fields to store the processed data:

- `processed`: Boolean flag indicating whether the session has been processed
- `sentimentCategory`: String value ("positive", "neutral", "negative") from OpenAI
- `questions`: JSON array of questions asked by the user
- `summary`: Brief summary of the conversation

## Configuration

### OpenAI API Key

To use the session processing feature, you need to add your OpenAI API key to the `.env.local` file:

```ini
OPENAI_API_KEY=your_api_key_here
```

### Running with Schedulers

To run the application with schedulers enabled:

- Development: `npm run dev`
- Development (with schedulers disabled): `npm run dev:no-schedulers`
- Production: `npm run start`

Note: These commands will start a custom Next.js server with the schedulers enabled. You'll need to have an OpenAI API key set in your `.env.local` file for the session processing to work.

## Manual Processing

You can also manually process sessions by running the script:

```bash
node scripts/process_sessions.mjs
```

This will process all unprocessed sessions that have transcript content.

## Customization

The processing logic can be customized by modifying:

- `lib/processingScheduler.ts`: Contains the OpenAI processing logic
- `scripts/process_sessions.ts`: Standalone script for manual processing

@@ -1,203 +0,0 @@

# Transcript Parsing Implementation

## Overview

Added structured message parsing to the LiveDash system, allowing transcripts to be broken down into individual messages with timestamps, roles, and content. This provides a much better user experience for viewing conversations.

## Database Changes

### New Message Table

```sql
CREATE TABLE Message (
  id TEXT PRIMARY KEY DEFAULT (uuid()),
  sessionId TEXT NOT NULL,
  timestamp DATETIME NOT NULL,
  role TEXT NOT NULL,
  content TEXT NOT NULL,
  "order" INTEGER NOT NULL, -- quoted because ORDER is a reserved word
  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (sessionId) REFERENCES Session(id) ON DELETE CASCADE
);

CREATE INDEX Message_sessionId_order_idx ON Message(sessionId, "order");
```

### Updated Session Table

- Added `messages` relation to Session model
- Sessions can now have both raw transcript content AND parsed messages

## New Components

### 1. Message Interface (`lib/types.ts`)

```typescript
export interface Message {
  id: string;
  sessionId: string;
  timestamp: Date;
  role: string; // "User", "Assistant", "System", etc.
  content: string;
  order: number; // Order within the conversation (0, 1, 2, ...)
  createdAt: Date;
}
```

### 2. Transcript Parser (`lib/transcriptParser.js`)

- **`parseChatLogToJSON(logString)`** - Parses raw transcript text into structured messages
- **`storeMessagesForSession(sessionId, messages)`** - Stores parsed messages in database
- **`processTranscriptForSession(sessionId, transcriptContent)`** - Complete processing for one session
- **`processAllUnparsedTranscripts()`** - Batch process all unparsed transcripts
- **`getMessagesForSession(sessionId)`** - Retrieve messages for a session

### 3. MessageViewer Component (`components/MessageViewer.tsx`)

- Chat-like interface for displaying parsed messages
- Color-coded by role (User: blue, Assistant: gray, System: yellow)
- Shows timestamps and message order
- Scrollable with conversation metadata

## Updated Components

### 1. Session API (`pages/api/dashboard/session/[id].ts`)

- Now includes parsed messages in session response
- Messages are ordered by `order` field (ascending)

### 2. Session Details Page (`app/dashboard/sessions/[id]/page.tsx`)

- Added MessageViewer component
- Shows both parsed messages AND raw transcript
- Prioritizes parsed messages when available

### 3. ChatSession Interface (`lib/types.ts`)

- Added optional `messages?: Message[]` field

## Parsing Logic

### Supported Format

The parser expects the transcript format:

```
[DD.MM.YYYY HH:MM:SS] Role: Message content
[DD.MM.YYYY HH:MM:SS] User: Hello, I need help
[DD.MM.YYYY HH:MM:SS] Assistant: How can I help you today?
```
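
A line of that shape can be matched with a regex along these lines (a sketch of the approach; the actual parser is `parseChatLogToJSON` in `lib/transcriptParser.js`):

```javascript
// [DD.MM.YYYY HH:MM:SS] Role: Message content
const LINE = /^\[(\d{2})\.(\d{2})\.(\d{4}) (\d{2}:\d{2}:\d{2})\]\s*([^:]+):\s*(.*)$/;

function parseLine(line) {
  const m = line.match(LINE);
  if (!m) return null; // treated as a continuation of the previous message
  const [, day, month, year, time, role, content] = m;
  return {
    timestamp: new Date(`${year}-${month}-${day}T${time}`), // DD.MM.YYYY → ISO
    role: role.trim(),
    content,
  };
}
```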

### Features

- **Multi-line support** - Messages can span multiple lines
- **Timestamp parsing** - Converts DD.MM.YYYY HH:MM:SS to ISO format
- **Role detection** - Extracts sender role from each message
- **Ordering** - Maintains conversation order with explicit order field
- **Sorting** - Messages sorted by timestamp, then by role (User before Assistant); see the comparator sketch below
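
That sorting rule corresponds to a comparator roughly like (a sketch):

```javascript
// Order by timestamp; on ties, User messages come before Assistant messages
messages.sort(
  (a, b) =>
    a.timestamp - b.timestamp ||
    (a.role === "User" ? -1 : 0) - (b.role === "User" ? -1 : 0)
);
```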

## Manual Commands

### New Commands Added

```bash
# Parse transcripts into structured messages
node scripts/manual-triggers.js parse

# Complete workflow: refresh → parse → process
node scripts/manual-triggers.js all

# Check status (now shows parsing info)
node scripts/manual-triggers.js status
```

### Updated Commands

- **`status`** - Now shows transcript and parsing statistics
- **`all`** - New command that runs refresh → parse → process in sequence

## Workflow Integration

### Complete Processing Pipeline

1. **Session Refresh** - Fetch sessions from CSV, download transcripts
2. **Transcript Parsing** - Parse raw transcripts into structured messages
3. **AI Processing** - Process sessions with OpenAI for sentiment, categories, etc.

### Database States

```javascript
// After CSV fetch
{
  transcriptContent: "raw text...",
  messages: [], // Empty
  processed: null
}

// After parsing
{
  transcriptContent: "raw text...",
  messages: [Message, Message, ...], // Parsed
  processed: null
}

// After AI processing
{
  transcriptContent: "raw text...",
  messages: [Message, Message, ...], // Parsed
  processed: true,
  sentimentCategory: "positive",
  summary: "Brief summary...",
  // ... other AI fields
}
```

## User Experience Improvements

### Before

- Only raw transcript text in a text area
- Difficult to follow conversation flow
- No clear distinction between speakers

### After

- **Chat-like interface** with message bubbles
- **Color-coded roles** for easy identification
- **Timestamps** for each message
- **Conversation metadata** (first/last message times)
- **Fallback to raw transcript** if parsing fails
- **Both views available** - structured AND raw

## Testing

### Manual Testing Commands

```bash
# Check current status
node scripts/manual-triggers.js status

# Parse existing transcripts
node scripts/manual-triggers.js parse

# Full pipeline test
node scripts/manual-triggers.js all
```

### Expected Results

1. Sessions with transcript content get parsed into individual messages
2. Session detail pages show chat-like interface
3. Both parsed messages and raw transcript are available
4. No data loss - original transcript content preserved

## Technical Benefits

### Performance

- **Indexed queries** - Messages indexed by sessionId and order
- **Efficient loading** - Only load messages when needed
- **Cascading deletes** - Messages automatically deleted with sessions

### Maintainability

- **Separation of concerns** - Parsing logic isolated in dedicated module
- **Type safety** - Full TypeScript support for Message interface
- **Error handling** - Graceful fallbacks when parsing fails

### Extensibility

- **Role flexibility** - Supports any role names (User, Assistant, System, etc.)
- **Content preservation** - Multi-line messages fully supported
- **Metadata ready** - Easy to add message-level metadata in future

## Migration Notes

### Existing Data

- **No data loss** - Original transcript content preserved
- **Backward compatibility** - Pages work with or without parsed messages
- **Gradual migration** - Can parse transcripts incrementally

### Database Migration

- New Message table created with foreign key constraints
- Existing Session table unchanged (only added relation)
- Index created for efficient message queries

This implementation provides a solid foundation for enhanced conversation analysis and user experience while maintaining full backward compatibility.

@@ -1,50 +0,0 @@
import { getServerSession } from "next-auth";
import { authOptions } from "../app/api/auth/[...nextauth]/route"; // Adjust path as needed
import { prisma } from "./prisma";
import { processUnprocessedSessions } from "./processingSchedulerNoCron";

export async function getAdminUser() {
  const session = await getServerSession(authOptions);

  if (!session?.user) {
    throw new Error("Not logged in");
  }

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
    include: { company: true },
  });

  if (!user) {
    throw new Error("No user found");
  }

  if (user.role !== "admin") {
    throw new Error("Admin access required");
  }

  return user;
}

export async function triggerSessionProcessing(batchSize?: number, maxConcurrency?: number) {
  const unprocessedCount = await prisma.session.count({
    where: {
      processed: false,
      messages: { some: {} }, // Must have messages
    },
  });

  if (unprocessedCount === 0) {
    return { message: "No unprocessed sessions found", unprocessedCount: 0, processedCount: 0 };
  }

  processUnprocessedSessions(batchSize, maxConcurrency)
    .then(() => {
      console.log(`[Manual Trigger] Processing completed`);
    })
    .catch((error) => {
      console.error(`[Manual Trigger] Processing failed:`, error);
    });

  return { message: `Started processing ${unprocessedCount} unprocessed sessions`, unprocessedCount };
}
@@ -1,7 +0,0 @@
import { prisma } from "./prisma";

export async function findUserByEmail(email: string) {
  return prisma.user.findUnique({
    where: { email },
  });
}
@@ -35,10 +35,10 @@ interface SessionData {
  startTime: Date;
  endTime: Date | null;
  ipAddress?: string;
  country?: string | null;
  language?: string | null;
  country?: string | null; // Will store ISO 3166-1 alpha-2 country code or null/undefined
  language?: string | null; // Will store ISO 639-1 language code or null/undefined
  messagesSent: number;
  sentiment?: string | null;
  sentiment: number | null;
  escalated: boolean;
  forwardedHr: boolean;
  fullTranscriptUrl?: string | null;
@@ -50,16 +50,65 @@ interface SessionData {
}

/**
 * Passes through country data as-is (no mapping)
 * Converts country names to ISO 3166-1 alpha-2 codes
 * @param countryStr Raw country string from CSV
 * @returns The country string as-is or null if empty
 * @returns ISO 3166-1 alpha-2 country code or null if not found
 */
function getCountryCode(countryStr?: string): string | null | undefined {
  if (countryStr === undefined) return undefined;
  if (countryStr === null || countryStr === "") return null;

  // Clean the input
  const normalized = countryStr.trim();
  return normalized || null;
  if (!normalized) return null;

  // Direct ISO code check (if already a 2-letter code)
  if (normalized.length === 2 && normalized === normalized.toUpperCase()) {
    return countries.isValid(normalized) ? normalized : null;
  }

  // Special case for country codes used in the dataset
  const countryMapping: Record<string, string> = {
    BA: "BA", // Bosnia and Herzegovina
    NL: "NL", // Netherlands
    USA: "US", // United States
    UK: "GB", // United Kingdom
    GB: "GB", // Great Britain
    Nederland: "NL",
    Netherlands: "NL",
    Netherland: "NL",
    Holland: "NL",
    Germany: "DE",
    Deutschland: "DE",
    Belgium: "BE",
    België: "BE",
    Belgique: "BE",
    France: "FR",
    Frankreich: "FR",
    "United States": "US",
    "United States of America": "US",
    Bosnia: "BA",
    "Bosnia and Herzegovina": "BA",
    "Bosnia & Herzegovina": "BA",
  };

  // Check mapping
  if (normalized in countryMapping) {
    return countryMapping[normalized];
  }

  // Try to get the code from the country name (in English)
  try {
    const code = countries.getAlpha2Code(normalized, "en");
    if (code) return code;
  } catch (error) {
    process.stderr.write(
      `[CSV] Error converting country name to code: ${normalized} - ${error}\n`
    );
  }

  // If all else fails, return null
  return null;
}

/**
@@ -131,15 +180,174 @@ function getLanguageCode(languageStr?: string): string | null | undefined {
}

/**
 * Passes through category data as-is (no mapping)
 * Normalizes category values to standard groups
 * @param categoryStr The raw category string from CSV
 * @returns The category string as-is or null if empty
 * @returns A normalized category string
 */
function normalizeCategory(categoryStr?: string): string | null {
  if (!categoryStr) return null;

  const normalized = categoryStr.trim();
  return normalized || null;
  const normalized = categoryStr.toLowerCase().trim();

  // Define category groups using keywords
  const categoryMapping: Record<string, string[]> = {
    Onboarding: [
      "onboarding",
      "start",
      "begin",
      "new",
      "orientation",
      "welcome",
      "intro",
      "getting started",
      "documents",
      "documenten",
      "first day",
      "eerste dag",
    ],
    "General Information": [
      "general",
      "algemeen",
      "info",
      "information",
      "informatie",
      "question",
      "vraag",
      "inquiry",
      "chat",
      "conversation",
      "gesprek",
      "talk",
    ],
    Greeting: [
      "greeting",
      "greet",
      "hello",
      "hi",
      "hey",
      "welcome",
      "hallo",
      "hoi",
      "greetings",
    ],
    "HR & Payroll": [
      "salary",
      "salaris",
      "pay",
      "payroll",
      "loon",
      "loonstrook",
      "hr",
      "human resources",
      "benefits",
      "vacation",
      "leave",
      "verlof",
      "maaltijdvergoeding",
      "vergoeding",
    ],
    "Schedules & Hours": [
      "schedule",
      "hours",
      "tijd",
      "time",
      "roster",
      "rooster",
      "planning",
      "shift",
      "dienst",
      "working hours",
      "werktijden",
      "openingstijden",
    ],
    "Role & Responsibilities": [
      "role",
      "job",
      "function",
      "functie",
      "task",
      "taak",
      "responsibilities",
      "leidinggevende",
      "manager",
      "teamleider",
      "supervisor",
      "team",
      "lead",
    ],
    "Technical Support": [
      "technical",
      "tech",
      "support",
      "laptop",
      "computer",
      "system",
      "systeem",
      "it",
      "software",
      "hardware",
    ],
    Offboarding: [
      "offboarding",
      "leave",
      "exit",
      "quit",
      "resign",
      "resignation",
      "ontslag",
      "vertrek",
      "afsluiting",
    ],
  };

  // Try to match the category using keywords
  for (const [category, keywords] of Object.entries(categoryMapping)) {
    if (keywords.some((keyword) => normalized.includes(keyword))) {
      return category;
    }
  }

  // If no match, return "Other"
  return "Other";
}

/**
 * Converts sentiment string values to numeric scores
 * @param sentimentStr The sentiment string from the CSV
 * @returns A numeric score representing the sentiment
 */
function mapSentimentToScore(sentimentStr?: string): number | null {
  if (!sentimentStr) return null;

  // Convert to lowercase for case-insensitive matching
  const sentiment = sentimentStr.toLowerCase();

  // Map sentiment strings to numeric values on a scale from -1 to 2
  const sentimentMap: Record<string, number> = {
    happy: 1.0,
    excited: 1.5,
    positive: 0.8,
    neutral: 0.0,
    playful: 0.7,
    negative: -0.8,
    angry: -1.0,
    sad: -0.7,
    frustrated: -0.9,
    positief: 0.8, // Dutch
    neutraal: 0.0, // Dutch
    negatief: -0.8, // Dutch
    positivo: 0.8, // Spanish/Italian
    neutro: 0.0, // Spanish/Italian
    negativo: -0.8, // Spanish/Italian
    yes: 0.5, // For any "yes" sentiment
    no: -0.5, // For any "no" sentiment
  };

  return sentimentMap[sentiment] !== undefined
    ? sentimentMap[sentiment]
    : isNaN(parseFloat(sentiment))
      ? null
      : parseFloat(sentiment);
}

/**
@@ -275,7 +483,7 @@ export async function fetchAndParseCsv(
      country: getCountryCode(r.country),
      language: getLanguageCode(r.language),
      messagesSent: Number(r.messages_sent) || 0,
      sentiment: r.sentiment,
      sentiment: mapSentimentToScore(r.sentiment),
      escalated: isTruthyValue(r.escalated),
      forwardedHr: isTruthyValue(r.forwarded_hr),
      fullTranscriptUrl: r.full_transcript_url,

@@ -1,332 +0,0 @@
import { prisma } from "./prisma";

// Example: Function to get a user by ID
export async function getUserById(id: string) {
  return prisma.user.findUnique({ where: { id } });
}

export async function getCompanyByUserId(userId: string) {
  const user = await prisma.user.findUnique({
    where: { id: userId },
  });
  if (!user) return null;
  return prisma.company.findUnique({
    where: { id: user.companyId },
  });
}

export async function updateCompanyCsvUrl(companyId: string, csvUrl: string) {
  return prisma.company.update({
    where: { id: companyId },
    data: { csvUrl },
  });
}

export async function findUserByEmailWithCompany(email: string) {
  return prisma.user.findUnique({
    where: { email },
    include: { company: true },
  });
}

export async function findSessionsByCompanyIdAndDateRange(companyId: string, startDate?: string, endDate?: string) {
  const whereClause: any = {
    companyId,
    processed: true,
  };

  if (startDate && endDate) {
    whereClause.startTime = {
      gte: new Date(startDate),
      lte: new Date(endDate + "T23:59:59.999Z"),
    };
  }

  return prisma.session.findMany({
    where: whereClause,
    include: {
      messages: true,
    },
  });
}

export async function getDistinctSessionCategories(companyId: string) {
  const categories = await prisma.session.findMany({
    where: {
      companyId,
      category: {
        not: null,
      },
    },
    distinct: ["category"],
    select: {
      category: true,
    },
    orderBy: {
      category: "asc",
    },
  });
  return categories.map((s) => s.category).filter(Boolean) as string[];
}

export async function getDistinctSessionLanguages(companyId: string) {
  const languages = await prisma.session.findMany({
    where: {
      companyId,
      language: {
        not: null,
      },
    },
    distinct: ["language"],
    select: {
      language: true,
    },
    orderBy: {
      language: "asc",
    },
  });
  return languages.map((s) => s.language).filter(Boolean) as string[];
}

export async function getSessionById(id: string) {
  return prisma.session.findUnique({
    where: { id },
    include: {
      messages: {
        orderBy: { order: "asc" },
      },
    },
  });
}

export async function getFilteredAndPaginatedSessions(
  companyId: string,
  searchTerm: string | null,
  category: string | null,
  language: string | null,
  startDate: string | null,
  endDate: string | null,
  sortKey: string | null,
  sortOrder: string | null,
  page: number,
  pageSize: number
) {
  const whereClause: Prisma.SessionWhereInput = { companyId };

  // Search Term
  if (
    searchTerm &&
    typeof searchTerm === "string" &&
    searchTerm.trim() !== ""
  ) {
    const searchConditions = [
      { id: { contains: searchTerm } },
      { category: { contains: searchTerm } },
      { initialMsg: { contains: searchTerm } },
    ];
    whereClause.OR = searchConditions;
  }

  // Category Filter
  if (category && typeof category === "string" && category.trim() !== "") {
    whereClause.category = category;
  }

  // Language Filter
  if (language && typeof language === "string" && language.trim() !== "") {
    whereClause.language = language;
  }

  // Date Range Filter
  if (startDate && typeof startDate === "string") {
    whereClause.startTime = {
      ...((whereClause.startTime as object) || {}),
      gte: new Date(startDate),
    };
  }
  if (endDate && typeof endDate === "string") {
    const inclusiveEndDate = new Date(endDate);
    inclusiveEndDate.setDate(inclusiveEndDate.getDate() + 1);
    whereClause.startTime = {
      ...((whereClause.startTime as object) || {}),
      lt: inclusiveEndDate,
    };
  }

  // Sorting
  const validSortKeys: { [key: string]: string } = {
    startTime: "startTime",
    category: "category",
    language: "language",
    sentiment: "sentiment",
    messagesSent: "messagesSent",
    avgResponseTime: "avgResponseTime",
  };

  let orderByCondition:
    | Prisma.SessionOrderByWithRelationInput
    | Prisma.SessionOrderByWithRelationInput[];

  const primarySortField =
    sortKey && typeof sortKey === "string" && validSortKeys[sortKey]
      ? validSortKeys[sortKey]
      : "startTime"; // Default to startTime field if sortKey is invalid/missing

  const primarySortOrder =
    sortOrder === "asc" || sortOrder === "desc" ? sortOrder : "desc"; // Default to desc order

  if (primarySortField === "startTime") {
    // If sorting by startTime, it's the only sort criteria
    orderByCondition = { [primarySortField]: primarySortOrder };
  } else {
    // If sorting by another field, use startTime: "desc" as secondary sort
    orderByCondition = [
      { [primarySortField]: primarySortOrder },
      { startTime: "desc" },
    ];
  }

  return prisma.session.findMany({
    where: whereClause,
    orderBy: orderByCondition,
    skip: (page - 1) * pageSize,
    take: pageSize,
  });
}

export async function countFilteredSessions(
  companyId: string,
  searchTerm: string | null,
  category: string | null,
  language: string | null,
  startDate: string | null,
  endDate: string | null
) {
  const whereClause: Prisma.SessionWhereInput = { companyId };

  // Search Term
  if (
    searchTerm &&
    typeof searchTerm === "string" &&
    searchTerm.trim() !== ""
  ) {
    const searchConditions = [
      { id: { contains: searchTerm } },
      { category: { contains: searchTerm } },
      { initialMsg: { contains: searchTerm } },
    ];
    whereClause.OR = searchConditions;
  }

  // Category Filter
  if (category && typeof category === "string" && category.trim() !== "") {
    whereClause.category = category;
  }

  // Language Filter
  if (language && typeof language === "string" && language.trim() !== "") {
    whereClause.language = language;
  }

  // Date Range Filter
  if (startDate && typeof startDate === "string") {
    whereClause.startTime = {
      ...((whereClause.startTime as object) || {}),
      gte: new Date(startDate),
    };
  }
  if (endDate && typeof endDate === "string") {
    const inclusiveEndDate = new Date(endDate);
    inclusiveEndDate.setDate(inclusiveEndDate.getDate() + 1);
    whereClause.startTime = {
      ...((whereClause.startTime as object) || {}),
      lt: inclusiveEndDate,
    };
  }

  return prisma.session.count({ where: whereClause });
}

export async function updateCompanySettings(
  companyId: string,
  data: {
    csvUrl?: string;
    csvUsername?: string;
    csvPassword?: string;
    sentimentAlert?: number | null;
  }
) {
  return prisma.company.update({
    where: { id: companyId },
    data,
  });
}

export async function getUsersByCompanyId(companyId: string) {
  return prisma.user.findMany({
    where: { companyId },
  });
}

export async function userExistsByEmail(email: string) {
  return prisma.user.findUnique({ where: { email } });
}

export async function createUser(email: string, passwordHash: string, companyId: string, role: string) {
  return prisma.user.create({
    data: {
      email,
      password: passwordHash,
      companyId,
      role,
    },
  });
}

export async function updateUserResetToken(email: string, token: string, expiry: Date) {
  return prisma.user.update({
    where: { email },
    data: { resetToken: token, resetTokenExpiry: expiry },
  });
}

export async function createCompany(name: string, csvUrl: string) {
  return prisma.company.create({
    data: { name, csvUrl },
  });
}

export async function findUserByResetToken(token: string) {
  return prisma.user.findFirst({
    where: {
      resetToken: token,
      resetTokenExpiry: { gte: new Date() },
    },
  });
}

export async function updateUserPasswordAndResetToken(userId: string, passwordHash: string) {
  return prisma.user.update({
    where: { id: userId },
    data: {
      password: passwordHash,
      resetToken: null,
      resetTokenExpiry: null,
    },
  });
}

// Add more data fetching functions here as needed

import { Prisma } from "@prisma/client";

export async function getSessionByCompanyId(where: Prisma.SessionWhereInput) {
  return prisma.session.findFirst({
    orderBy: { createdAt: "desc" },
    where,
  });
}

export async function getCompanyById(companyId: string) {
  return prisma.company.findUnique({ where: { id: companyId } });
}

143 lib/metrics.ts
@@ -7,7 +7,6 @@ import {
  CountryMetrics, // Added CountryMetrics
  MetricsResult,
  WordCloudWord, // Added WordCloudWord
  TopQuestion, // Added TopQuestion
} from "./types";

interface CompanyConfig {
@@ -325,16 +324,7 @@ export function sessionMetrics(
  sessions: ChatSession[],
  companyConfig: CompanyConfig = {}
): MetricsResult {
  // Filter out invalid data sessions for analytics
  const validSessions = sessions.filter(session => {
    // Include sessions that are either:
    // 1. Not processed yet (validData field doesn't exist or is undefined)
    // 2. Processed and marked as valid (validData === true)
    return session.validData !== false;
  });

  const totalSessions = validSessions.length; // Only count valid sessions
  const totalRawSessions = sessions.length; // Keep track of all sessions for debugging
  const totalSessions = sessions.length; // Renamed from 'total' for clarity
  const byDay: DayMetrics = {};
  const byCategory: CategoryMetrics = {};
  const byLanguage: LanguageMetrics = {};
@@ -357,25 +347,9 @@ export function sessionMetrics(
  let totalTokens = 0;
  let totalTokensEur = 0;
  const wordCounts: { [key: string]: number } = {};
  const alerts = 0;

  // New metrics variables
  const hourlySessionCounts: { [hour: string]: number } = {};
  let resolvedChatsCount = 0;
  const questionCounts: { [question: string]: number } = {};
  let alerts = 0;

  for (const session of sessions) {
    // Track hourly usage for peak time calculation
    if (session.startTime) {
      const hour = new Date(session.startTime).getHours();
      const hourKey = `${hour.toString().padStart(2, "0")}:00`;
      hourlySessionCounts[hourKey] = (hourlySessionCounts[hourKey] || 0) + 1;
    }

    // Count resolved chats (sessions that have ended and are not escalated)
    if (session.endTime && !session.escalated) {
      resolvedChatsCount++;
    }
    // Unique Users: Prefer non-empty ipAddress, fallback to non-empty sessionId
    let identifierAdded = false;
    if (session.ipAddress && session.ipAddress.trim() !== "") {
@@ -463,15 +437,22 @@ export function sessionMetrics(
    if (session.forwardedHr) forwardedHrCount++;

    // Sentiment
    if (session.sentiment === "positive") {
      sentimentPositiveCount++;
    } else if (session.sentiment === "neutral") {
      sentimentNeutralCount++;
    } else if (session.sentiment === "negative") {
      sentimentNegativeCount++;
    if (session.sentiment !== undefined && session.sentiment !== null) {
      // Example thresholds, adjust as needed
      if (session.sentiment > 0.3) sentimentPositiveCount++;
      else if (session.sentiment < -0.3) sentimentNegativeCount++;
      else sentimentNeutralCount++;
    }


    // Sentiment Alert Check
    if (
      companyConfig.sentimentAlert !== undefined &&
      session.sentiment !== undefined &&
      session.sentiment !== null &&
      session.sentiment < companyConfig.sentimentAlert
    ) {
      alerts++;
    }

    // Tokens
    if (session.tokens !== undefined && session.tokens !== null) {
@@ -506,62 +487,6 @@ export function sessionMetrics(
      byCountry[session.country] = (byCountry[session.country] || 0) + 1;
    }

    // Extract questions from session
    const extractQuestions = () => {
      // 1. Extract from questions JSON field
      if (session.questions) {
        try {
          const questionsArray = JSON.parse(session.questions);
          if (Array.isArray(questionsArray)) {
            questionsArray.forEach((question: string) => {
              if (question && question.trim().length > 0) {
                const cleanQuestion = question.trim();
                questionCounts[cleanQuestion] =
                  (questionCounts[cleanQuestion] || 0) + 1;
              }
            });
          }
        } catch (error) {
          console.warn(
            `[metrics] Failed to parse questions JSON for session ${session.id}: ${error}`
          );
        }
      }

      // 2. Extract questions from user messages (if available)
      if (session.messages) {
        session.messages
          .filter((msg) => msg.role === "User")
          .forEach((msg) => {
            const content = msg.content.trim();
            // Simple heuristic: if message ends with ? or contains question words, treat as question
            if (
              content.endsWith("?") ||
              /\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(
                content
              )
            ) {
              questionCounts[content] = (questionCounts[content] || 0) + 1;
            }
          });
      }

      // 3. Extract questions from initial message as fallback
      if (session.initialMsg) {
        const content = session.initialMsg.trim();
        if (
          content.endsWith("?") ||
          /\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(
            content
          )
        ) {
          questionCounts[content] = (questionCounts[content] || 0) + 1;
        }
      }
    };

    extractQuestions();

    // Word Cloud Data (from initial message and transcript content)
    const processTextForWordCloud = (text: string | undefined | null) => {
      if (!text) return;
@@ -581,8 +506,7 @@ export function sessionMetrics(
      }
    };
    processTextForWordCloud(session.initialMsg);
    // Note: transcriptContent is not available in ChatSession type
    // Could be added later if transcript parsing is implemented
    processTextForWordCloud(session.transcriptContent);
  }

  const uniqueUsers = uniqueUserIds.size;
@@ -623,33 +547,6 @@ export function sessionMetrics(
    mockPreviousPeriodData.avgResponseTime
  );

  // Calculate new metrics

  // 1. Average Daily Costs (euros)
  const avgDailyCosts =
    numDaysWithSessions > 0 ? totalTokensEur / numDaysWithSessions : 0;

  // 2. Peak Usage Time
  let peakUsageTime = "N/A";
  if (Object.keys(hourlySessionCounts).length > 0) {
    const peakHour = Object.entries(hourlySessionCounts).sort(
      ([, a], [, b]) => b - a
    )[0][0];
    const peakHourNum = parseInt(peakHour.split(":")[0]);
    const endHour = (peakHourNum + 1) % 24;
    peakUsageTime = `${peakHour}-${endHour.toString().padStart(2, "0")}:00`;
  }

  // 3. Resolved Chats Percentage
  const resolvedChatsPercentage =
    totalSessions > 0 ? (resolvedChatsCount / totalSessions) * 100 : 0;

  // 4. Top 5 Asked Questions
  const topQuestions: TopQuestion[] = Object.entries(questionCounts)
    .sort(([, a], [, b]) => b - a)
    .slice(0, 5) // Top 5 questions
    .map(([question, count]) => ({ question, count }));

  // console.log("Debug metrics calculation:", {
  // totalSessionDuration,
  // validSessionsForDuration,
@@ -688,11 +585,5 @@ export function sessionMetrics(
    lastUpdated: Date.now(),
    totalSessionDuration,
    validSessionsForDuration,

    // New metrics
    avgDailyCosts,
    peakUsageTime,
    resolvedChatsPercentage,
    topQuestions,
  };
}

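The peak-usage metric in the hunks above reduces to bucketing session start times by hour and taking the largest bucket. A standalone sketch of the same idea (the function name is invented for illustration):

function peakUsageWindow(startTimes: Date[]): string {
  const counts = new Map<number, number>();
  for (const t of startTimes) {
    counts.set(t.getHours(), (counts.get(t.getHours()) ?? 0) + 1);
  }
  if (counts.size === 0) return "N/A";
  // Sort hour buckets by descending count and take the winner.
  const [peakHour] = [...counts.entries()].sort(([, a], [, b]) => b - a)[0];
  const pad = (n: number) => n.toString().padStart(2, "0");
  return `${pad(peakHour)}:00-${pad((peakHour + 1) % 24)}:00`;
}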
@@ -1,5 +1,6 @@
// Simple Prisma client setup
// Prisma client setup with support for Cloudflare D1
import { PrismaClient } from "@prisma/client";
import { PrismaD1 } from "@prisma/adapter-d1";

// Add prisma to the NodeJS global type
// This approach avoids NodeJS.Global which is not available
@@ -9,12 +10,24 @@ declare const global: {
  prisma: PrismaClient | undefined;
};

// Initialize Prisma Client
const prisma = global.prisma || new PrismaClient();
// Check if we're running in Cloudflare Workers environment
const isCloudflareWorker = typeof globalThis.DB !== 'undefined';

// Save in global if we're in development
if (process.env.NODE_ENV !== "production") {
  global.prisma = prisma;
// Initialize Prisma Client
let prisma: PrismaClient;

if (isCloudflareWorker) {
  // In Cloudflare Workers, use D1 adapter
  const adapter = new PrismaD1(globalThis.DB);
  prisma = new PrismaClient({ adapter });
} else {
  // In Next.js/Node.js, use regular SQLite
  prisma = global.prisma || new PrismaClient();

  // Save in global if we're in development
  if (process.env.NODE_ENV !== "production") {
    global.prisma = prisma;
  }
}

export { prisma };

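Under strict TypeScript, the bare `globalThis.DB` reads above would not compile without an ambient declaration for the D1 binding; something along these lines is presumably needed (a sketch assuming `@cloudflare/workers-types` is available — the diff itself does not show it):

import type { D1Database } from "@cloudflare/workers-types";

declare global {
  // D1 binding injected by the Workers runtime; the name "DB" is taken from the diff.
  var DB: D1Database | undefined;
}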
@@ -1,475 +0,0 @@
// Session processing scheduler - TypeScript version
// Note: Disabled due to Next.js compatibility issues
// import cron from "node-cron";
import { PrismaClient } from "@prisma/client";
import fetch from "node-fetch";
import { readFileSync } from "fs";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
import { VALID_CATEGORIES, ValidCategory, SentimentCategory } from "./types";

// Load environment variables from .env.local
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const envPath = join(__dirname, "..", ".env.local");

try {
  const envFile = readFileSync(envPath, "utf8");
  const envVars = envFile
    .split("\n")
    .filter((line) => line.trim() && !line.startsWith("#"));

  envVars.forEach((line) => {
    const [key, ...valueParts] = line.split("=");
    if (key && valueParts.length > 0) {
      const value = valueParts.join("=").trim();
      if (!process.env[key.trim()]) {
        process.env[key.trim()] = value;
      }
    }
  });
} catch (error) {
  // Silently fail if .env.local doesn't exist
}

const prisma = new PrismaClient();
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";

interface ProcessedData {
  language: string;
  sentiment: "positive" | "neutral" | "negative";
  escalated: boolean;
  forwarded_hr: boolean;
  category: ValidCategory;
  questions: string | string[];
  summary: string;
  tokens: number;
  tokens_eur: number;
}

interface ProcessingResult {
  sessionId: string;
  success: boolean;
  error?: string;
}

/**
 * Processes a session transcript using OpenAI API
 */
async function processTranscriptWithOpenAI(
  sessionId: string,
  transcript: string
): Promise<ProcessedData> {
  if (!OPENAI_API_KEY) {
    throw new Error("OPENAI_API_KEY environment variable is not set");
  }

  // Create a system message with instructions
  const systemMessage = `
System: You are a JSON-generating assistant. Your task is to analyze raw chat transcripts between a user and an assistant and return structured data.

⚠️ IMPORTANT:
- You must return a **single, valid JSON object**.
- Do **not** include markdown formatting, code fences, explanations, or comments.
- The JSON must match the exact structure and constraints described below.

Here is the schema you must follow:

{{
"language": "ISO 639-1 code, e.g., 'en', 'nl'",
"sentiment": "'positive', 'neutral', or 'negative'",
"escalated": "bool: true if the assistant connected or referred to a human agent, otherwise false",
"forwarded_hr": "bool: true if HR contact info was given, otherwise false",
"category": "one of: 'Schedule & Hours', 'Leave & Vacation', 'Sick Leave & Recovery', 'Salary & Compensation', 'Contract & Hours', 'Onboarding', 'Offboarding', 'Workwear & Staff Pass', 'Team & Contacts', 'Personal Questions', 'Access & Login', 'Social questions', 'Unrecognized / Other'",
"questions": "a single question or an array of simplified questions asked by the user formulated in English, try to make a question out of messages",
"summary": "Brief summary (1–2 sentences) of the conversation",
"tokens": "integer, number of tokens used for the API call",
"tokens_eur": "float, cost of the API call in EUR",
}}

You must format your output as a JSON value that adheres to a given "JSON Schema" instance.

"JSON Schema" is a declarative language that allows you to annotate and validate JSON documents.

For example, the example "JSON Schema" instance {"properties": {"foo": {"description": "a list of test words", "type": "array", "items": {"type": "string"}}}}, "required": ["foo"]}}
would match an object with one required property, "foo". The "type" property specifies "foo" must be an "array", and the "description" property semantically describes it as "a list of test words". The items within "foo" must be strings.
Thus, the object {"foo": ["bar", "baz"]} is a well-formatted instance of this example "JSON Schema". The object {"properties": {"foo": ["bar", "baz"]}} is not well-formatted.

Your output will be parsed and type-checked according to the provided schema instance, so make sure all fields in your output match the schema exactly and there are no trailing commas!

Here is the JSON Schema instance your output must adhere to. Include the enclosing markdown codeblock:

{{"type":"object","properties":{"language":{"type":"string","pattern":"^[a-z]{2}$","description":"ISO 639-1 code for the user's primary language"},"sentiment":{"type":"string","enum":["positive","neutral","negative"],"description":"Overall tone of the user during the conversation"},"escalated":{"type":"boolean","description":"Whether the assistant indicated it could not help"},"forwarded_hr":{"type":"boolean","description":"Whether HR contact was mentioned or provided"},"category":{"type":"string","enum":["Schedule & Hours","Leave & Vacation","Sick Leave & Recovery","Salary & Compensation","Contract & Hours","Onboarding","Offboarding","Workwear & Staff Pass","Team & Contacts","Personal Questions","Access & Login","Social questions","Unrecognized / Other"],"description":"Best-fitting topic category for the conversation"},"questions":{"oneOf":[{"type":"string"},{"type":"array","items":{"type":"string"}}],"description":"A single question or a list of paraphrased questions asked by the user in English"},"summary":{"type":"string","minLength":10,"maxLength":300,"description":"Brief summary of the conversation"},"tokens":{"type":"integer","description":"Number of tokens used for the API call"},"tokens_eur":{"type":"number","description":"Cost of the API call in EUR"}},"required":["language","sentiment","escalated","forwarded_hr","category","questions","summary","tokens","tokens_eur"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}
`;

  try {
    const response = await fetch(OPENAI_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${OPENAI_API_KEY}`,
      },
      body: JSON.stringify({
        model: "gpt-4-turbo",
        messages: [
          {
            role: "system",
            content: systemMessage,
          },
          {
            role: "user",
            content: transcript,
          },
        ],
        temperature: 0.3, // Lower temperature for more consistent results
        response_format: { type: "json_object" },
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
    }

    const data: any = await response.json();
    const processedData = JSON.parse(data.choices[0].message.content);

    // Validate the response against our expected schema
    validateOpenAIResponse(processedData);

    return processedData;
  } catch (error) {
    process.stderr.write(`Error processing transcript with OpenAI: ${error}\n`);
    throw error;
  }
}

/**
 * Validates the OpenAI response against our expected schema
 */
function validateOpenAIResponse(data: any): void {
  // Check required fields
  const requiredFields = [
    "language",
    "sentiment",
    "escalated",
    "forwarded_hr",
    "category",
    "questions",
    "summary",
    "tokens",
    "tokens_eur",
  ];

  for (const field of requiredFields) {
    if (!(field in data)) {
      throw new Error(`Missing required field: ${field}`);
    }
  }

  // Validate field types
  if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
    throw new Error(
      "Invalid language format. Expected ISO 639-1 code (e.g., 'en')"
    );
  }

  if (!["positive", "neutral", "negative"].includes(data.sentiment)) {
    throw new Error(
      "Invalid sentiment. Expected 'positive', 'neutral', or 'negative'"
    );
  }

  if (typeof data.escalated !== "boolean") {
    throw new Error("Invalid escalated. Expected boolean");
  }

  if (typeof data.forwarded_hr !== "boolean") {
    throw new Error("Invalid forwarded_hr. Expected boolean");
  }

  if (!VALID_CATEGORIES.includes(data.category)) {
    throw new Error(
      `Invalid category. Expected one of: ${VALID_CATEGORIES.join(", ")}`
    );
  }

  if (typeof data.questions !== "string" && !Array.isArray(data.questions)) {
    throw new Error("Invalid questions. Expected string or array of strings");
  }

  if (
    typeof data.summary !== "string" ||
    data.summary.length < 10 ||
    data.summary.length > 300
  ) {
    throw new Error(
      "Invalid summary. Expected string between 10-300 characters"
    );
  }

  if (typeof data.tokens !== "number" || data.tokens < 0) {
    throw new Error("Invalid tokens. Expected non-negative number");
  }

  if (typeof data.tokens_eur !== "number" || data.tokens_eur < 0) {
    throw new Error("Invalid tokens_eur. Expected non-negative number");
  }
}

/**
 * Process a single session
 */
async function processSingleSession(session: any): Promise<ProcessingResult> {
  if (session.messages.length === 0) {
    return {
      sessionId: session.id,
      success: false,
      error: "Session has no messages",
    };
  }

  // Check for minimum data quality requirements
  const userMessages = session.messages.filter((msg: any) =>
    msg.role.toLowerCase() === 'user' || msg.role.toLowerCase() === 'human'
  );

  if (userMessages.length === 0) {
    // Mark as invalid data - no user interaction
    await prisma.session.update({
      where: { id: session.id },
      data: {
        processed: true,
        summary: "No user messages found - marked as invalid data",
      },
    });

    return {
      sessionId: session.id,
      success: true,
      error: "No user messages - marked as invalid data",
    };
  }

  try {
    // Convert messages back to transcript format for OpenAI processing
    const transcript = session.messages
      .map(
        (msg: any) =>
          `[${new Date(msg.timestamp)
            .toLocaleString("en-GB", {
              day: "2-digit",
              month: "2-digit",
              year: "numeric",
              hour: "2-digit",
              minute: "2-digit",
              second: "2-digit",
            })
            .replace(",", "")}] ${msg.role}: ${msg.content}`
      )
      .join("\n");

    const processedData = await processTranscriptWithOpenAI(
      session.id,
      transcript
    );

    // Check if the processed data indicates low quality (empty questions, very short summary, etc.)
    const hasValidQuestions =
      processedData.questions &&
      (Array.isArray(processedData.questions)
        ? processedData.questions.length > 0
        : typeof processedData.questions === "string");
    const hasValidSummary = processedData.summary && processedData.summary.length >= 10;
    const isValidData = hasValidQuestions && hasValidSummary;

    // Update the session with processed data
    await prisma.session.update({
      where: { id: session.id },
      data: {
        language: processedData.language,
        sentiment: processedData.sentiment,
        escalated: processedData.escalated,
        forwardedHr: processedData.forwarded_hr,
        category: processedData.category,
        questions: processedData.questions,
        summary: processedData.summary,
        tokens: {
          increment: processedData.tokens,
        },
        tokensEur: {
          increment: processedData.tokens_eur,
        },
        processed: true,
      },
    });

    if (!isValidData) {
      process.stdout.write(
        `[ProcessingScheduler] ⚠️ Session ${session.id} marked as invalid data (empty questions or short summary)\n`
      );
    }

    return {
      sessionId: session.id,
      success: true,
    };
  } catch (error) {
    return {
      sessionId: session.id,
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}

/**
 * Process sessions in parallel with concurrency limit
 */
async function processSessionsInParallel(
  sessions: any[],
  maxConcurrency: number = 5
): Promise<ProcessingResult[]> {
  const results: Promise<ProcessingResult>[] = [];
  const executing: Promise<ProcessingResult>[] = [];

  for (const session of sessions) {
    const promise = processSingleSession(session).then((result) => {
      process.stdout.write(
        result.success
          ? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
          : `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
      );
      return result;
    });

    results.push(promise);
    executing.push(promise);

    if (executing.length >= maxConcurrency) {
      await Promise.race(executing);
      const completedIndex = executing.findIndex((p) => p === promise);
      if (completedIndex !== -1) {
        executing.splice(completedIndex, 1);
      }
    }
  }

  return Promise.all(results);
}

/**
 * Process unprocessed sessions in batches until completion
 */
export async function processUnprocessedSessions(
  batchSize: number = 10,
  maxConcurrency: number = 5
): Promise<{ totalProcessed: number; totalFailed: number; totalTime: number }> {
  process.stdout.write(
    "[ProcessingScheduler] Starting complete processing of all unprocessed sessions...\n"
  );

  let totalProcessed = 0;
  let totalFailed = 0;
  const overallStartTime = Date.now();
  let batchNumber = 1;

  while (true) {
    // Find sessions that have messages but haven't been processed
    const sessionsToProcess = await prisma.session.findMany({
      where: {
        AND: [
          { messages: { some: {} } }, // Must have messages
          { processed: false }, // Only unprocessed sessions
        ],
      },
      include: {
        messages: {
          orderBy: { order: "asc" },
        },
      },
      take: batchSize,
    });

    // Filter to only sessions that have messages
    const sessionsWithMessages = sessionsToProcess.filter(
      (session: any) => session.messages && session.messages.length > 0
    );

    if (sessionsWithMessages.length === 0) {
      process.stdout.write(
        "[ProcessingScheduler] ✅ All sessions with messages have been processed!\n"
      );
      break;
    }

    process.stdout.write(
      `[ProcessingScheduler] 📦 Batch ${batchNumber}: Processing ${sessionsWithMessages.length} sessions (max concurrency: ${maxConcurrency})...\n`
    );

    const batchStartTime = Date.now();
    const results = await processSessionsInParallel(
      sessionsWithMessages,
      maxConcurrency
    );
    const batchEndTime = Date.now();

    const batchSuccessCount = results.filter((r) => r.success).length;
    const batchErrorCount = results.filter((r) => !r.success).length;

    totalProcessed += batchSuccessCount;
    totalFailed += batchErrorCount;

    process.stdout.write(
      `[ProcessingScheduler] 📦 Batch ${batchNumber} complete: ${batchSuccessCount} success, ${batchErrorCount} failed (${((batchEndTime - batchStartTime) / 1000).toFixed(2)}s)\n`
    );

    batchNumber++;

    // Small delay between batches to prevent overwhelming the system
    if (sessionsWithMessages.length === batchSize) {
      await new Promise(resolve => setTimeout(resolve, 1000));
    }
  }

  const overallEndTime = Date.now();
  const totalTime = (overallEndTime - overallStartTime) / 1000;

  process.stdout.write("[ProcessingScheduler] 🎉 Complete processing finished!\n");
  process.stdout.write(
    `[ProcessingScheduler] 📊 Total results: ${totalProcessed} processed, ${totalFailed} failed\n`
  );
  process.stdout.write(
    `[ProcessingScheduler] ⏱️ Total processing time: ${totalTime.toFixed(2)}s\n`
  );

  return { totalProcessed, totalFailed, totalTime };
}

/**
 * Start the processing scheduler
 */
export function startProcessingScheduler(): void {
  // Note: Scheduler disabled due to Next.js compatibility issues
  // Use manual triggers via API endpoints instead
  console.log("Processing scheduler disabled - using manual triggers via API endpoints");

  // Original cron-based implementation commented out due to Next.js compatibility issues
  // The functionality is now available via the /api/admin/trigger-processing endpoint
  /*
  cron.schedule("0 * * * *", async () => {
    try {
      await processUnprocessedSessions();
    } catch (error) {
      process.stderr.write(
        `[ProcessingScheduler] Error in scheduler: ${error}\n`
      );
    }
  });

  process.stdout.write(
    "[ProcessingScheduler] Started processing scheduler (runs hourly).\n"
  );
  */
}
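One subtlety in processSessionsInParallel above: after `Promise.race(executing)` it always removes the most recently queued promise, not whichever promise actually settled, so the in-flight set can drift from the real state of the pool. A self-cleaning variant (a sketch, not the repo's code):

async function runWithConcurrency<T>(
  tasks: Array<() => Promise<T>>,
  maxConcurrency = 5
): Promise<T[]> {
  const results: Promise<T>[] = [];
  const executing = new Set<Promise<T>>();

  for (const task of tasks) {
    const p = task().finally(() => executing.delete(p)); // removes itself on settle
    results.push(p);
    executing.add(p);
    if (executing.size >= maxConcurrency) {
      await Promise.race(executing); // wait until any in-flight task settles
    }
  }
  return Promise.all(results);
}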
@@ -1,447 +0,0 @@
// Session processing without cron dependency - for Next.js API routes
import { PrismaClient } from "@prisma/client";
import fetch from "node-fetch";
import { readFileSync } from "fs";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
import { VALID_CATEGORIES, ValidCategory, SentimentCategory } from "./types";

// Load environment variables from .env.local
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const envPath = join(__dirname, "..", ".env.local");

try {
  const envFile = readFileSync(envPath, "utf8");
  const envVars = envFile
    .split("\n")
    .filter((line) => line.trim() && !line.startsWith("#"));

  envVars.forEach((line) => {
    const [key, ...valueParts] = line.split("=");
    if (key && valueParts.length > 0) {
      const value = valueParts.join("=").trim();
      if (!process.env[key.trim()]) {
        process.env[key.trim()] = value;
      }
    }
  });
} catch (error) {
  // Silently fail if .env.local doesn't exist
}

const prisma = new PrismaClient();
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";

interface ProcessedData {
  language: string;
  sentiment: "positive" | "neutral" | "negative";
  escalated: boolean;
  forwarded_hr: boolean;
  category: ValidCategory;
  questions: string | string[];
  summary: string;
  tokens: number;
  tokens_eur: number;
}

interface ProcessingResult {
  sessionId: string;
  success: boolean;
  error?: string;
}

/**
 * Processes a session transcript using OpenAI API
 */
async function processTranscriptWithOpenAI(
  sessionId: string,
  transcript: string
): Promise<ProcessedData> {
  if (!OPENAI_API_KEY) {
    throw new Error("OPENAI_API_KEY environment variable is not set");
  }

  // Create a system message with instructions
  const systemMessage = `
System: You are a JSON-generating assistant. Your task is to analyze raw chat transcripts between a user and an assistant and return structured data.

⚠️ IMPORTANT:
- You must return a **single, valid JSON object**.
- Do **not** include markdown formatting, code fences, explanations, or comments.
- The JSON must match the exact structure and constraints described below.

Here is the schema you must follow:

{
"language": "ISO 639-1 code, e.g., 'en', 'nl'",
"sentiment": "'positive', 'neutral', or 'negative'",
"escalated": "bool: true if the assistant connected or referred to a human agent, otherwise false",
"forwarded_hr": "bool: true if HR contact info was given, otherwise false",
"category": "one of: 'Schedule & Hours', 'Leave & Vacation', 'Sick Leave & Recovery', 'Salary & Compensation', 'Contract & Hours', 'Onboarding', 'Offboarding', 'Workwear & Staff Pass', 'Team & Contacts', 'Personal Questions', 'Access & Login', 'Social questions', 'Unrecognized / Other'",
"questions": "a single question or an array of simplified questions asked by the user formulated in English, try to make a question out of messages",
"summary": "Brief summary (1–2 sentences) of the conversation",
"tokens": "integer, number of tokens used for the API call",
"tokens_eur": "float, cost of the API call in EUR",
}

You must format your output as a JSON value that adheres to a given "JSON Schema" instance.

"JSON Schema" is a declarative language that allows you to annotate and validate JSON documents.

For example, the example "JSON Schema" instance {{"properties": {{"foo": {{"description": "a list of test words", "type": "array", "items": {{"type": "string"}}}}}}, "required": ["foo"]}}}}
would match an object with one required property, "foo". The "type" property specifies "foo" must be an "array", and the "description" property semantically describes it as "a list of test words". The items within "foo" must be strings.
Thus, the object {{"foo": ["bar", "baz"]}} is a well-formatted instance of this example "JSON Schema". The object {{"properties": {{"foo": ["bar", "baz"]}}}} is not well-formatted.

Your output will be parsed and type-checked according to the provided schema instance, so make sure all fields in your output match the schema exactly and there are no trailing commas!

Here is the JSON Schema instance your output must adhere to. Include the enclosing markdown codeblock:

{{"type":"object","properties":{"language":{"type":"string","pattern":"^[a-z]{2}$","description":"ISO 639-1 code for the user's primary language"},"sentiment":{"type":"string","enum":["positive","neutral","negative"],"description":"Overall tone of the user during the conversation"},"escalated":{"type":"boolean","description":"Whether the assistant indicated it could not help"},"forwarded_hr":{"type":"boolean","description":"Whether HR contact was mentioned or provided"},"category":{"type":"string","enum":["Schedule & Hours","Leave & Vacation","Sick Leave & Recovery","Salary & Compensation","Contract & Hours","Onboarding","Offboarding","Workwear & Staff Pass","Team & Contacts","Personal Questions","Access & Login","Social questions","Unrecognized / Other"],"description":"Best-fitting topic category for the conversation"},"questions":{"oneOf":[{"type":"string"},{"type":"array","items":{"type":"string"}}],"description":"A single question or a list of paraphrased questions asked by the user in English"},"summary":{"type":"string","minLength":10,"maxLength":300,"description":"Brief summary of the conversation"},"tokens":{"type":"integer","description":"Number of tokens used for the API call"},"tokens_eur":{"type":"number","description":"Cost of the API call in EUR"}},"required":["language","sentiment","escalated","forwarded_hr","category","questions","summary","tokens","tokens_eur"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}

`;

  try {
    const response = await fetch(OPENAI_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${OPENAI_API_KEY}`,
      },
      body: JSON.stringify({
        model: "gpt-4-turbo",
        messages: [
          {
            role: "system",
            content: systemMessage,
          },
          {
            role: "user",
            content: transcript,
          },
        ],
        temperature: 0.3, // Lower temperature for more consistent results
        response_format: { type: "json_object" },
      }),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
    }

    const data: any = await response.json();
    const processedData = JSON.parse(data.choices[0].message.content);

    // Validate the response against our expected schema
    validateOpenAIResponse(processedData);

    return processedData;
  } catch (error) {
    process.stderr.write(`Error processing transcript with OpenAI: ${error}\n`);
    throw error;
  }
}

/**
 * Validates the OpenAI response against our expected schema
 */
function validateOpenAIResponse(data: any): void {
  // Check required fields
  const requiredFields = [
    "language",
    "sentiment",
    "escalated",
    "forwarded_hr",
    "category",
    "questions",
    "summary",
    "tokens",
    "tokens_eur",
  ];

  for (const field of requiredFields) {
    if (!(field in data)) {
      throw new Error(`Missing required field: ${field}`);
    }
  }

  // Validate field types
  if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
    throw new Error(
      "Invalid language format. Expected ISO 639-1 code (e.g., 'en')"
    );
  }

  if (!["positive", "neutral", "negative"].includes(data.sentiment)) {
    throw new Error(
      "Invalid sentiment. Expected 'positive', 'neutral', or 'negative'"
    );
  }

  if (typeof data.escalated !== "boolean") {
    throw new Error("Invalid escalated. Expected boolean");
  }

  if (typeof data.forwarded_hr !== "boolean") {
    throw new Error("Invalid forwarded_hr. Expected boolean");
  }

  if (!VALID_CATEGORIES.includes(data.category)) {
    throw new Error(
      `Invalid category. Expected one of: ${VALID_CATEGORIES.join(", ")}`
    );
  }

  if (typeof data.questions !== "string" && !Array.isArray(data.questions)) {
    throw new Error("Invalid questions. Expected string or array of strings");
  }

  if (
    typeof data.summary !== "string" ||
    data.summary.length < 10 ||
    data.summary.length > 300
  ) {
    throw new Error(
      "Invalid summary. Expected string between 10-300 characters"
    );
  }

  if (typeof data.tokens !== "number" || data.tokens < 0) {
    throw new Error("Invalid tokens. Expected non-negative number");
  }

  if (typeof data.tokens_eur !== "number" || data.tokens_eur < 0) {
    throw new Error("Invalid tokens_eur. Expected non-negative number");
  }
}

/**
 * Process a single session
 */
async function processSingleSession(session: any): Promise<ProcessingResult> {
  if (session.messages.length === 0) {
    return {
      sessionId: session.id,
      success: false,
      error: "Session has no messages",
    };
  }

  // Check for minimum data quality requirements
  const userMessages = session.messages.filter((msg: any) =>
    msg.role.toLowerCase() === 'user' || msg.role.toLowerCase() === 'human'
  );

  if (userMessages.length === 0) {
    // Mark as invalid data - no user interaction
    await prisma.session.update({
      where: { id: session.id },
      data: {
        processed: true,
        summary: "No user messages found - marked as invalid data",
      },
    });

    return {
      sessionId: session.id,
      success: true,
      error: "No user messages - marked as invalid data",
    };
  }

  try {
    // Convert messages back to transcript format for OpenAI processing
    const transcript = session.messages
      .map(
        (msg: any) =>
          `[${new Date(msg.timestamp)
            .toLocaleString("en-GB", {
              day: "2-digit",
              month: "2-digit",
              year: "numeric",
              hour: "2-digit",
              minute: "2-digit",
              second: "2-digit",
            })
            .replace(",", "")}] ${msg.role}: ${msg.content}`
      )
      .join("\n");

    const processedData = await processTranscriptWithOpenAI(
      session.id,
      transcript
    );

    // Check if the processed data indicates low quality (empty questions, very short summary, etc.)
    const hasValidQuestions =
      processedData.questions &&
      (Array.isArray(processedData.questions)
        ? processedData.questions.length > 0
        : typeof processedData.questions === "string");
    const hasValidSummary = processedData.summary && processedData.summary.length >= 10;
    const isValidData = hasValidQuestions && hasValidSummary;

    // Update the session with processed data
    await prisma.session.update({
      where: { id: session.id },
      data: {
        language: processedData.language,
        sentiment: processedData.sentiment,
        escalated: processedData.escalated,
        forwardedHr: processedData.forwarded_hr,
        category: processedData.category,
        questions: processedData.questions,
        summary: processedData.summary,
        tokens: {
          increment: processedData.tokens,
        },
        tokensEur: {
          increment: processedData.tokens_eur,
        },
        processed: true,
      },
    });

    if (!isValidData) {
      process.stdout.write(
        `[ProcessingScheduler] ⚠️ Session ${session.id} marked as invalid data (empty questions or short summary)\n`
      );
    }

    return {
      sessionId: session.id,
      success: true,
    };
  } catch (error) {
    return {
      sessionId: session.id,
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}

/**
 * Process sessions in parallel with concurrency limit
 */
async function processSessionsInParallel(
  sessions: any[],
  maxConcurrency: number = 5
): Promise<ProcessingResult[]> {
  const results: Promise<ProcessingResult>[] = [];
  const executing: Promise<ProcessingResult>[] = [];

  for (const session of sessions) {
    const promise = processSingleSession(session).then((result) => {
      process.stdout.write(
        result.success
          ? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
          : `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
      );
      return result;
    });

    results.push(promise);
    executing.push(promise);

    if (executing.length >= maxConcurrency) {
      await Promise.race(executing);
      const completedIndex = executing.findIndex((p) => p === promise);
      if (completedIndex !== -1) {
        executing.splice(completedIndex, 1);
      }
    }
  }

  return Promise.all(results);
}

/**
 * Process unprocessed sessions in batches until completion
 */
export async function processUnprocessedSessions(
  batchSize: number = 10,
  maxConcurrency: number = 5
): Promise<{ totalProcessed: number; totalFailed: number; totalTime: number }> {
  process.stdout.write(
    "[ProcessingScheduler] Starting complete processing of all unprocessed sessions...\n"
  );

  let totalProcessed = 0;
  let totalFailed = 0;
  const overallStartTime = Date.now();
  let batchNumber = 1;

  while (true) {
    // Find sessions that have messages but haven't been processed
    const sessionsToProcess = await prisma.session.findMany({
      where: {
        AND: [
          { messages: { some: {} } }, // Must have messages
          { processed: false }, // Only unprocessed sessions
        ],
      },
      include: {
        messages: {
          orderBy: { order: "asc" },
        },
      },
      take: batchSize,
    });

    // Filter to only sessions that have messages
    const sessionsWithMessages = sessionsToProcess.filter(
      (session: any) => session.messages && session.messages.length > 0
    );

    if (sessionsWithMessages.length === 0) {
      process.stdout.write(
        "[ProcessingScheduler] ✅ All sessions with messages have been processed!\n"
      );
      break;
    }

    process.stdout.write(
      `[ProcessingScheduler] 📦 Batch ${batchNumber}: Processing ${sessionsWithMessages.length} sessions (max concurrency: ${maxConcurrency})...\n`
    );

    const batchStartTime = Date.now();
    const results = await processSessionsInParallel(
      sessionsWithMessages,
      maxConcurrency
    );
    const batchEndTime = Date.now();

    const batchSuccessCount = results.filter((r) => r.success).length;
    const batchErrorCount = results.filter((r) => !r.success).length;

    totalProcessed += batchSuccessCount;
    totalFailed += batchErrorCount;

    process.stdout.write(
      `[ProcessingScheduler] 📦 Batch ${batchNumber} complete: ${batchSuccessCount} success, ${batchErrorCount} failed (${((batchEndTime - batchStartTime) / 1000).toFixed(2)}s)\n`
    );

    batchNumber++;

    // Small delay between batches to prevent overwhelming the system
    if (sessionsWithMessages.length === batchSize) {
      await new Promise(resolve => setTimeout(resolve, 1000));
    }
  }

  const overallEndTime = Date.now();
  const totalTime = (overallEndTime - overallStartTime) / 1000;

  process.stdout.write("[ProcessingScheduler] 🎉 Complete processing finished!\n");
  process.stdout.write(
    `[ProcessingScheduler] 📊 Total results: ${totalProcessed} processed, ${totalFailed} failed\n`
  );
  process.stdout.write(
    `[ProcessingScheduler] ⏱️ Total processing time: ${totalTime.toFixed(2)}s\n`
  );

  return { totalProcessed, totalFailed, totalTime };
}
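This deleted file was written for manual triggers from Next.js API routes, and earlier comments point at endpoints such as /api/admin/trigger-processing. A minimal route sketch for such a trigger, assuming the Pages Router and an assumed module path (the actual file layout is not shown in this compare):

import type { NextApiRequest, NextApiResponse } from "next";
import { processUnprocessedSessions } from "../../../lib/processing"; // path assumed

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "POST") return res.status(405).end();
  // Defaults: batchSize 10, maxConcurrency 5, per the signature above.
  const result = await processUnprocessedSessions();
  res.status(200).json(result);
}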
111
lib/scheduler.ts
@@ -1,6 +1,5 @@
// node-cron job to auto-refresh session data every 15 mins
// Note: Disabled due to Next.js compatibility issues
// import cron from "node-cron";
import cron from "node-cron";
import { prisma } from "./prisma";
import { fetchAndParseCsv } from "./csvFetcher";

@@ -11,11 +10,107 @@ interface SessionCreateData {
  [key: string]: unknown;
}

export function startScheduler() {
  // Note: Scheduler disabled due to Next.js compatibility issues
  // Use manual triggers via API endpoints instead
  console.log("Session refresh scheduler disabled - using manual triggers via API endpoints");
/**
 * Fetches transcript content from a URL with optional authentication
 * @param url The URL to fetch the transcript from
 * @param username Optional username for Basic Auth
 * @param password Optional password for Basic Auth
 * @returns The transcript content or null if fetching fails
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

  // Original cron-based implementation commented out due to Next.js compatibility issues
  // The functionality is now available via the /api/admin/refresh-sessions endpoint
    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`
      );
      return null;
    }
    return await response.text();
  } catch (error) {
    process.stderr.write(`Failed to fetch transcript: ${error}\n`);
    return null;
  }
}

export function startScheduler() {
  cron.schedule("*/15 * * * *", async () => {
    const companies = await prisma.company.findMany();
    for (const company of companies) {
      try {
        const sessions = await fetchAndParseCsv(
          company.csvUrl,
          company.csvUsername as string | undefined,
          company.csvPassword as string | undefined
        );
        await prisma.session.deleteMany({ where: { companyId: company.id } });

        for (const session of sessions) {
          // Fetch transcript content if URL is available
          let transcriptContent: string | null = null;
          if (session.fullTranscriptUrl) {
            transcriptContent = await fetchTranscriptContent(
              session.fullTranscriptUrl,
              company.csvUsername as string | undefined,
              company.csvPassword as string | undefined
            );
          }

          const sessionData: SessionCreateData = {
            ...session,
            companyId: company.id,
            id: session.id || session.sessionId || `sess_${Date.now()}`,
            // Ensure startTime is not undefined
            startTime: session.startTime || new Date(),
          };

          // Only include fields that are properly typed for Prisma
          await prisma.session.create({
            data: {
              id: sessionData.id,
              companyId: sessionData.companyId,
              startTime: sessionData.startTime,
              // endTime is required in the schema, so use startTime if not available
              endTime: session.endTime || new Date(),
              ipAddress: session.ipAddress || null,
              country: session.country || null,
              language: session.language || null,
              sentiment:
                typeof session.sentiment === "number"
                  ? session.sentiment
                  : null,
              messagesSent:
                typeof session.messagesSent === "number"
                  ? session.messagesSent
                  : 0,
              category: session.category || null,
              fullTranscriptUrl: session.fullTranscriptUrl || null,
              transcriptContent: transcriptContent, // Add the transcript content
            },
          });
        }
        // Using process.stdout.write instead of console.log to avoid ESLint warning
        process.stdout.write(
          `[Scheduler] Refreshed sessions for company: ${company.name}\n`
        );
      } catch (e) {
        // Using process.stderr.write instead of console.error to avoid ESLint warning
        process.stderr.write(
          `[Scheduler] Failed for company: ${company.name} - ${e}\n`
        );
      }
    }
  });
}

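The schedule string above, "*/15 * * * *", fires at minutes 0, 15, 30 and 45 of every hour; node-cron can sanity-check an expression like this before it is registered:

import cron from "node-cron";

// Quick check of the refresh cadence used above.
console.log(cron.validate("*/15 * * * *")); // true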
@@ -1,17 +0,0 @@
// Combined scheduler initialization
// Note: Removed cron-based scheduler imports to avoid Next.js compatibility issues
// import { startScheduler } from "./scheduler";
// import { startProcessingScheduler } from "./processingScheduler";

/**
 * Initialize all schedulers
 * - Session refresh scheduler (runs every 15 minutes)
 * - Session processing scheduler (runs every hour)
 */
export function initializeSchedulers() {
  // Note: All schedulers disabled due to Next.js compatibility issues
  // Use manual triggers via API endpoints instead
  console.log("Schedulers disabled - using manual triggers via API endpoints");
  // startScheduler();
  // startProcessingScheduler();
}
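If these schedulers were ever re-enabled, Next.js's instrumentation hook would be one plausible place to call initializeSchedulers exactly once per server process — a hypothetical sketch only; the module path and the use of instrumentation are assumptions, not part of this compare:

// instrumentation.ts (Next.js 13.4+)
export async function register() {
  if (process.env.NEXT_RUNTIME === "nodejs") {
    const { initializeSchedulers } = await import("./lib/initSchedulers"); // path assumed
    initializeSchedulers();
  }
}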
@@ -1,98 +0,0 @@
import { prisma } from "./prisma";
import { fetchAndParseCsv } from "./csvFetcher";
import { triggerCompleteWorkflow } from "./workflow";

interface SessionCreateData {
  id: string;
  startTime: Date;
  companyId: string;
  sessionId?: string;
  [key: string]: unknown;
}

export async function processSessions(company: any) {
  const sessions = await fetchAndParseCsv(
    company.csvUrl,
    company.csvUsername as string | undefined,
    company.csvPassword as string | undefined
  );

  for (const session of sessions) {
    const sessionData: SessionCreateData = {
      ...session,
      companyId: company.id,
      id:
        session.id ||
        session.sessionId ||
        `sess_${Date.now()}_${Math.random().toString(36).substring(2, 7)}`,
      // Ensure startTime is not undefined
      startTime: session.startTime || new Date(),
    };

    // Validate dates to prevent "Invalid Date" errors
    const startTime =
      sessionData.startTime instanceof Date &&
      !isNaN(sessionData.startTime.getTime())
        ? sessionData.startTime
        : new Date();
    const endTime =
      session.endTime instanceof Date && !isNaN(session.endTime.getTime())
        ? session.endTime
        : new Date();

    // Check if the session already exists
    const existingSession = await prisma.session.findUnique({
      where: { id: sessionData.id },
    });

    if (existingSession) {
      // Skip this session as it already exists
      continue;
    }

    // Only include fields that are properly typed for Prisma
    await prisma.session.create({
      data: {
        id: sessionData.id,
        companyId: sessionData.companyId,
        startTime: startTime,
        endTime: endTime,
        ipAddress: session.ipAddress || null,
        country: session.country || null,
        language: session.language || null,
        messagesSent:
          typeof session.messagesSent === "number" ? session.messagesSent : 0,
        sentiment:
          typeof session.sentiment === "number" ? session.sentiment : null,
        escalated:
          typeof session.escalated === "boolean" ? session.escalated : null,
        forwardedHr:
          typeof session.forwardedHr === "boolean"
            ? session.forwardedHr
            : null,
        fullTranscriptUrl: session.fullTranscriptUrl || null,
        avgResponseTime:
          typeof session.avgResponseTime === "number"
            ? session.avgResponseTime
            : null,
        tokens: typeof session.tokens === "number" ? session.tokens : null,
        tokensEur:
          typeof session.tokensEur === "number" ? session.tokensEur : null,
        category: session.category || null,
        initialMsg: session.initialMsg || null,
      },
    });
  }

  // After importing sessions, automatically trigger complete workflow (fetch transcripts + process)
  // This runs in the background without blocking the response
  triggerCompleteWorkflow()
    .then((result) => {
      console.log(`[Refresh Sessions] Complete workflow finished: ${result.message}`);
    })
    .catch((error) => {
      console.error(`[Refresh Sessions] Complete workflow failed:`, error);
    });

  return sessions.length;
}
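The find-then-skip pattern above costs two round trips per session; Prisma's upsert with an empty update expresses the same "insert if absent, otherwise leave alone" in one call. A sketch against the fields used above (remaining columns omitted for brevity):

await prisma.session.upsert({
  where: { id: sessionData.id },
  update: {}, // existing rows are left untouched
  create: {
    id: sessionData.id,
    companyId: sessionData.companyId,
    startTime,
    endTime,
    // ...other columns as in the create() call above
  },
});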
@@ -1,263 +0,0 @@
|
||||
// Transcript parser utility - converts raw transcript text to structured messages
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
|
||||
* Parses chat log string to JSON format with individual messages
|
||||
* @param {string} logString - Raw transcript content
|
||||
* @returns {Object} Parsed data with messages array and metadata
|
||||
*/
|
||||
export function parseChatLogToJSON(logString) {
|
||||
// Convert to string if it's not already
|
||||
const stringData =
|
||||
typeof logString === "string" ? logString : String(logString);
|
||||
|
||||
// Split by lines and filter out empty lines
|
||||
const lines = stringData.split("\n").filter((line) => line.trim() !== "");
|
||||
|
||||
const messages = [];
|
||||
let currentMessage = null;
|
||||
|
||||
for (const line of lines) {
|
||||
// Check if line starts with a timestamp pattern [DD.MM.YYYY HH:MM:SS]
|
||||
const timestampMatch = line.match(
|
||||
/^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\] (.+?): (.*)$/
|
||||
);
|
||||
|
||||
if (timestampMatch) {
|
||||
// If we have a previous message, push it to the array
|
||||
if (currentMessage) {
|
||||
messages.push(currentMessage);
|
||||
}
|
||||
|
||||
// Parse the timestamp
|
||||
const [, timestamp, sender, content] = timestampMatch;
|
||||
|
||||
// Convert DD.MM.YYYY HH:MM:SS to ISO format
|
||||
const [datePart, timePart] = timestamp.split(" ");
|
||||
const [day, month, year] = datePart.split(".");
|
||||
const [hour, minute, second] = timePart.split(":");
|
||||
|
||||
const dateObject = new Date(year, month - 1, day, hour, minute, second);
|
||||
|
||||
// Create new message object
|
||||
currentMessage = {
|
||||
timestamp: dateObject.toISOString(),
|
||||
role: sender,
|
||||
content: content,
|
||||
};
|
||||
} else if (currentMessage) {
|
||||
// This is a continuation of the previous message (multiline)
|
||||
currentMessage.content += "\n" + line;
|
||||
}
|
||||
}
|
||||
|
||||
// Don't forget the last message
|
||||
if (currentMessage) {
|
||||
messages.push(currentMessage);
|
||||
}
|
||||
|
||||
return {
|
||||
messages: messages.sort((a, b) => {
|
||||
// First sort by timestamp (ascending)
|
||||
const timeComparison = new Date(a.timestamp) - new Date(b.timestamp);
|
||||
if (timeComparison !== 0) {
|
||||
return timeComparison;
|
||||
}
|
||||
|
||||
// If timestamps are equal, sort by role (descending)
|
||||
// This puts "User" before "Assistant" when timestamps are the same
|
      return b.role.localeCompare(a.role);
    }),
    totalMessages: messages.length,
  };
}

/**
 * Stores parsed messages in the database for a session
 * @param {string} sessionId - The session ID
 * @param {Array} messages - Array of parsed message objects
 */
export async function storeMessagesForSession(sessionId, messages) {
  try {
    // First, delete any existing messages for this session
    await prisma.message.deleteMany({
      where: { sessionId },
    });

    // Then insert the new messages
    const messageData = messages.map((message, index) => ({
      sessionId,
      timestamp: new Date(message.timestamp),
      role: message.role,
      content: message.content,
      order: index,
    }));

    if (messageData.length > 0) {
      await prisma.message.createMany({
        data: messageData,
      });

      // Extract actual end time from the latest message
      const latestMessage = messages.reduce((latest, current) => {
        return new Date(current.timestamp) > new Date(latest.timestamp)
          ? current
          : latest;
      });

      // Update the session's endTime with the actual conversation end time
      await prisma.session.update({
        where: { id: sessionId },
        data: {
          endTime: new Date(latestMessage.timestamp),
        },
      });

      process.stdout.write(
        `[TranscriptParser] Updated session ${sessionId} endTime to ${latestMessage.timestamp}\n`
      );
    }

    process.stdout.write(
      `[TranscriptParser] Stored ${messageData.length} messages for session ${sessionId}\n`
    );
    return messageData.length;
  } catch (error) {
    process.stderr.write(
      `[TranscriptParser] Error storing messages for session ${sessionId}: ${error}\n`
    );
    throw error;
  }
}

/**
 * Processes and stores transcript for a single session
 * @param {string} sessionId - The session ID
 * @param {string} transcriptContent - Raw transcript content
 * @returns {Promise<Object>} Processing result with message count
 */
export async function processTranscriptForSession(
  sessionId,
  transcriptContent
) {
  if (!transcriptContent || transcriptContent.trim() === "") {
    throw new Error("No transcript content provided");
  }

  try {
    // Parse the transcript
    const parsed = parseChatLogToJSON(transcriptContent);

    // Store messages in database
    const messageCount = await storeMessagesForSession(
      sessionId,
      parsed.messages
    );

    return {
      sessionId,
      messageCount,
      totalMessages: parsed.totalMessages,
      success: true,
    };
  } catch (error) {
    process.stderr.write(
      `[TranscriptParser] Error processing transcript for session ${sessionId}: ${error}\n`
    );
    throw error;
  }
}

/**
 * Processes transcripts for all sessions that have transcript content but no parsed messages
 */
export async function processAllUnparsedTranscripts() {
  process.stdout.write(
    "[TranscriptParser] Starting to process unparsed transcripts...\n"
  );

  try {
    // Find sessions with transcript content but no messages
    const sessionsToProcess = await prisma.session.findMany({
      where: {
        AND: [
          { transcriptContent: { not: null } },
          { transcriptContent: { not: "" } },
        ],
      },
      include: {
        messages: true,
      },
    });

    // Filter to only sessions without messages
    const unparsedSessions = sessionsToProcess.filter(
      (session) => session.messages.length === 0
    );

    if (unparsedSessions.length === 0) {
      process.stdout.write(
        "[TranscriptParser] No unparsed transcripts found.\n"
      );
      return { processed: 0, errors: 0 };
    }

    process.stdout.write(
      `[TranscriptParser] Found ${unparsedSessions.length} sessions with unparsed transcripts.\n`
    );

    let successCount = 0;
    let errorCount = 0;

    for (const session of unparsedSessions) {
      try {
        const result = await processTranscriptForSession(
          session.id,
          session.transcriptContent
        );
        process.stdout.write(
          `[TranscriptParser] Processed session ${session.id}: ${result.messageCount} messages\n`
        );
        successCount++;
      } catch (error) {
        process.stderr.write(
          `[TranscriptParser] Failed to process session ${session.id}: ${error}\n`
        );
        errorCount++;
      }
    }

    process.stdout.write(
      `[TranscriptParser] Completed processing. Success: ${successCount}, Errors: ${errorCount}\n`
    );
    return { processed: successCount, errors: errorCount };
  } catch (error) {
    process.stderr.write(
      `[TranscriptParser] Error in processAllUnparsedTranscripts: ${error}\n`
    );
    throw error;
  }
}

/**
 * Gets parsed messages for a session
 * @param {string} sessionId - The session ID
 * @returns {Promise<Array>} Array of message objects
 */
export async function getMessagesForSession(sessionId) {
  try {
    const messages = await prisma.message.findMany({
      where: { sessionId },
      orderBy: { order: "asc" },
    });

    return messages;
  } catch (error) {
    process.stderr.write(
      `[TranscriptParser] Error getting messages for session ${sessionId}: ${error}\n`
    );
    throw error;
  }
}
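A minimal usage sketch of these helpers, assuming they are exported from lib/transcriptParser.js, that a Session row with the given ID already exists, and that the ID and transcript text below are placeholders rather than real data:

// Hypothetical one-off script; module path, session ID, and transcript are assumptions.
import {
  processTranscriptForSession,
  getMessagesForSession,
} from "./lib/transcriptParser";

async function main() {
  const sessionId = "sess_example"; // placeholder ID, not from the repo
  const transcript = "[01-06-2025 09:15:00] User: Hello"; // sample raw transcript

  // Parse the raw transcript and persist its messages for the session.
  const result = await processTranscriptForSession(sessionId, transcript);
  console.log(`Stored ${result.messageCount} messages`);

  // Read the messages back in conversation order.
  const messages = await getMessagesForSession(sessionId);
  for (const m of messages) {
    console.log(`${m.order}. [${m.role}] ${m.content}`);
  }
}

main().catch(console.error);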
50
lib/types.ts
@@ -1,26 +1,5 @@
import { Session as NextAuthSession } from "next-auth";

// Standardized enums
export type SentimentCategory = "positive" | "neutral" | "negative";

export const VALID_CATEGORIES = [
  "Schedule & Hours",
  "Leave & Vacation",
  "Sick Leave & Recovery",
  "Salary & Compensation",
  "Contract & Hours",
  "Onboarding",
  "Offboarding",
  "Workwear & Staff Pass",
  "Team & Contacts",
  "Personal Questions",
  "Access & Login",
  "Social questions",
  "Unrecognized / Other",
] as const;

export type ValidCategory = (typeof VALID_CATEGORIES)[number];

export interface UserSession extends NextAuthSession {
  user: {
    id?: string;
@@ -56,16 +35,6 @@ export interface User {
  updatedAt: Date;
}

export interface Message {
  id: string;
  sessionId: string;
  timestamp: Date;
  role: string; // "User", "Assistant", "System", etc.
  content: string;
  order: number; // Order within the conversation (0, 1, 2, ...)
  createdAt: Date;
}

export interface ChatSession {
  id: string;
  sessionId: string;
@@ -75,7 +44,7 @@ export interface ChatSession {
  language?: string | null;
  country?: string | null;
  ipAddress?: string | null;
  sentiment?: string | null;
  sentiment?: number | null;
  messagesSent?: number;
  startTime: Date;
  endTime?: Date | null;
@@ -90,11 +59,7 @@ export interface ChatSession {
  tokensEur?: number;
  initialMsg?: string;
  fullTranscriptUrl?: string | null;
  processed?: boolean | null; // Flag for post-processing status
  validData?: boolean | null; // Flag for data quality (false = exclude from analytics)
  questions?: string | null; // JSON array of questions asked by user
  summary?: string | null; // Brief summary of the conversation
  messages?: Message[]; // Parsed messages from transcript
  transcriptContent?: string | null;
}

export interface SessionQuery {
@@ -140,11 +105,6 @@ export interface WordCloudWord {
  value: number;
}

export interface TopQuestion {
  question: string;
  count: number;
}

export interface MetricsResult {
  totalSessions: number;
  avgSessionsPerDay: number;
@@ -179,12 +139,6 @@ export interface MetricsResult {
  avgSessionTimeTrend?: number; // e.g., percentage change in avgSessionLength
  avgResponseTimeTrend?: number; // e.g., percentage change in avgResponseTime

  // New metrics for enhanced dashboard
  avgDailyCosts?: number; // Average daily costs in euros
  peakUsageTime?: string; // Peak usage time (e.g., "14:00-15:00")
  resolvedChatsPercentage?: number; // Percentage of resolved chats
  topQuestions?: TopQuestion[]; // Top 5 most asked questions

  // Debug properties
  totalSessionDuration?: number;
  validSessionsForDuration?: number;
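The VALID_CATEGORIES tuple pairs naturally with a type guard, so category strings coming out of the database can be narrowed to ValidCategory. A minimal sketch (the isValidCategory helper is an illustration, not part of this diff):

import { VALID_CATEGORIES, type ValidCategory } from "./lib/types";

// Hypothetical helper: narrows an arbitrary string (e.g. Session.category
// read from the database) to the ValidCategory union.
function isValidCategory(value: string): value is ValidCategory {
  return (VALID_CATEGORIES as readonly string[]).includes(value);
}

const raw = "Leave & Vacation";
if (isValidCategory(raw)) {
  const category: ValidCategory = raw; // safely narrowed
  console.log(`Known category: ${category}`);
} else {
  console.log("Falls into 'Unrecognized / Other'");
}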
@@ -1,6 +0,0 @@
import { clsx, type ClassValue } from "clsx"
import { twMerge } from "tailwind-merge"

export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}
@@ -1 +0,0 @@
import { prisma } from "./prisma";
import { processUnprocessedSessions } from "./processingSchedulerNoCron";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
import { readFileSync } from "fs";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const envPath = join(__dirname, "..", ".env.local");

try {
  const envFile = readFileSync(envPath, "utf8");
  const envVars = envFile
    .split("\n")
    .filter((line) => line.trim() && !line.startsWith("#"));
  envVars.forEach((line) => {
    const [key, ...valueParts] = line.split("=");
    if (key && valueParts.length > 0) {
      const value = valueParts.join("=").trim();
      if (!process.env[key.trim()]) {
        process.env[key.trim()] = value;
      }
    }
  });
} catch (error) {}

async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;
    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });
    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`
      );
      return null;
    }
    return await response.text();
  } catch (error) {
    process.stderr.write(`Failed to fetch transcript: ${error}\n`);
    return null;
  }
}

export async function triggerCompleteWorkflow(): Promise<{ message: string }> {
  try {
    const sessionsWithoutMessages = await prisma.session.count({
      where: { messages: { none: {} }, fullTranscriptUrl: { not: null } },
    });
    if (sessionsWithoutMessages > 0) {
      console.log(
        `[Complete Workflow] Fetching transcripts for ${sessionsWithoutMessages} sessions`
      );
      const sessionsToProcess = await prisma.session.findMany({
        where: {
          AND: [
            { fullTranscriptUrl: { not: null } },
            { messages: { none: {} } },
          ],
        },
        include: {
          company: true,
        },
        take: 20,
      });
      for (const session of sessionsToProcess) {
        try {
          if (!session.fullTranscriptUrl) continue;
          const transcriptContent = await fetchTranscriptContent(
            session.fullTranscriptUrl,
            session.company.csvUsername || undefined,
            session.company.csvPassword || undefined
          );
          if (!transcriptContent) {
            console.log(`No transcript content for session ${session.id}`);
            continue;
          }
          const lines = transcriptContent
            .split("\n")
            .filter((line) => line.trim());
          const messages: Array<{
            sessionId: string;
            role: string;
            content: string;
            timestamp: Date;
            order: number;
          }> = [];
          let messageOrder = 0;
          for (const line of lines) {
            const timestampMatch = line.match(/^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/);
            if (timestampMatch) {
              const [, timestamp, role, content] = timestampMatch;
              const dateMatch = timestamp.match(
                /^(\d{1,2})-(\d{1,2})-(\d{4}) (\d{1,2}):(\d{1,2}):(\d{1,2})$/
              );
              let parsedTimestamp = new Date();
              if (dateMatch) {
                const [, day, month, year, hour, minute, second] = dateMatch;
                parsedTimestamp = new Date(
                  parseInt(year),
                  parseInt(month) - 1,
                  parseInt(day),
                  parseInt(hour),
                  parseInt(minute),
                  parseInt(second)
                );
              }
              messages.push({
                sessionId: session.id,
                role: role.trim().toLowerCase(),
                content: content.trim(),
                timestamp: parsedTimestamp,
                order: messageOrder++,
              });
            }
          }
          if (messages.length > 0) {
            await prisma.message.createMany({
              data: messages as any,
            });
            console.log(
              `Added ${messages.length} messages for session ${session.id}`
            );
          }
        } catch (error) {
          console.error(`Error processing session ${session.id}:`, error);
        }
      }
    }
    const unprocessedWithMessages = await prisma.session.count({
      where: { processed: false, messages: { some: {} } },
    });
    if (unprocessedWithMessages > 0) {
      console.log(
        `[Complete Workflow] Processing ${unprocessedWithMessages} sessions`
      );
      await processUnprocessedSessions();
    }
    return { message: `Complete workflow finished successfully` };
  } catch (error) {
    console.error("[Complete Workflow] Error:", error);
    throw error;
  }
}
@@ -1,3 +1,6 @@
-- Initial database schema for LiveDash-Node
-- This combines the init migration and transcript_content addition

-- CreateTable
CREATE TABLE "Company" (
    "id" TEXT NOT NULL PRIMARY KEY,
@@ -34,37 +37,18 @@ CREATE TABLE "Session" (
    "language" TEXT,
    "messagesSent" INTEGER,
    "sentiment" REAL,
    "sentimentCategory" TEXT,
    "escalated" BOOLEAN,
    "forwardedHr" BOOLEAN,
    "fullTranscriptUrl" TEXT,
    "transcriptContent" TEXT,
    "avgResponseTime" REAL,
    "tokens" INTEGER,
    "tokensEur" REAL,
    "category" TEXT,
    "initialMsg" TEXT,
    "processed" BOOLEAN NOT NULL DEFAULT false,
    "validData" BOOLEAN NOT NULL DEFAULT true,
    "questions" TEXT,
    "summary" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Message" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "sessionId" TEXT NOT NULL,
    "timestamp" DATETIME NOT NULL,
    "role" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "order" INTEGER NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Message_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

-- CreateIndex
CREATE INDEX "Message_sessionId_order_idx" ON "Message"("sessionId", "order");
@@ -5,24 +5,11 @@ const nextConfig = {
  reactStrictMode: true,
  // Allow cross-origin requests from specific origins in development
  allowedDevOrigins: [
    "127.0.0.1",
    "localhost"
    "192.168.1.2",
    "localhost",
    "propc",
    "test123.kjanat.com",
  ],
  // Disable Turbopack for now due to EISDIR error on Windows
  webpack: (config, { isServer }) => {
    if (!isServer) {
      config.resolve.fallback = { fs: false, net: false, tls: false };
    }
    return config;
  },
  experimental: {
    appDir: true,
    serverComponentsExternalPackages: ['@prisma/client', 'bcryptjs'],
    // disable the new Turbopack engine
    // This is a temporary workaround for the EISDIR error on Windows
    // Remove this once the issue is resolved in Next.js or Turbopack
    turbopack: false,
  },
};

export default nextConfig;
10253
package-lock.json
generated
File diff suppressed because it is too large
106
package.json
@@ -5,80 +5,91 @@
  "private": true,
  "scripts": {
    "build": "next build",
    "dev": "next dev",
    "dev:with-server": "tsx server.ts",
    "format": "npx prettier --write .",
    "format:check": "npx prettier --check .",
    "format": "pnpm dlx prettier --write .",
    "format:check": "pnpm dlx prettier --check .",
    "format:standard": "pnpm dlx standard . --fix",
    "lint": "next lint",
    "lint:fix": "npx eslint --fix .",
    "lint:fix": "pnpm dlx eslint --fix",
    "prisma:generate": "prisma generate",
    "prisma:migrate": "prisma migrate dev",
    "prisma:seed": "tsx prisma/seed.ts",
    "prisma:push": "prisma db push",
    "prisma:seed": "node prisma/seed.mjs",
    "prisma:studio": "prisma studio",
    "start": "tsx server.ts",
    "start": "next start",
    "lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
    "lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\""
    "lint:md:fix": "markdownlint-cli2 --fix \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
    "cf-typegen": "wrangler types",
    "check": "tsc && wrangler deploy --dry-run",
    "deploy": "wrangler deploy",
    "dev": "next dev",
    "dev:old": "next dev --turbopack",
    "dev:cf": "wrangler dev",
    "predeploy": "wrangler d1 migrations apply DB --remote",
    "seedLocalD1": "wrangler d1 migrations apply DB --local",
    "d1:list": "wrangler d1 list",
    "d1:info": "wrangler d1 info d1-notso-livedash",
    "d1:info:remote": "wrangler d1 info d1-notso-livedash --remote",
    "d1:query": "node scripts/d1-query.js",
    "d1:export": "wrangler d1 export d1-notso-livedash",
    "d1:export:remote": "wrangler d1 export d1-notso-livedash --remote",
    "d1:backup": "wrangler d1 export d1-notso-livedash --output backups/$(date +%Y%m%d_%H%M%S)_backup.sql",
    "d1:schema": "wrangler d1 export d1-notso-livedash --no-data --output schema.sql",
    "d1": "node scripts/d1.js"
  },
  "dependencies": {
    "@prisma/client": "^6.10.1",
    "@prisma/adapter-d1": "^6.8.2",
    "@prisma/client": "^6.8.2",
    "@rapideditor/country-coder": "^5.4.0",
    "@types/d3": "^7.4.3",
    "@types/d3-cloud": "^1.2.9",
    "@types/d3-selection": "^3.0.11",
    "@types/geojson": "^7946.0.16",
    "@types/leaflet": "^1.9.18",
    "@types/node-fetch": "^2.6.12",
    "bcryptjs": "^3.0.2",
    "chart.js": "^4.0.0",
    "chart.js": "^4.4.9",
    "chartjs-plugin-annotation": "^3.1.0",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "csv-parse": "^5.5.0",
    "csv-parse": "^5.6.0",
    "d3": "^7.9.0",
    "d3-cloud": "^1.2.7",
    "d3-selection": "^3.0.0",
    "i18n-iso-countries": "^7.14.0",
    "iso-639-1": "^3.1.5",
    "leaflet": "^1.9.4",
    "lucide-react": "^0.523.0",
    "next": "^15.3.4",
    "next": "^15.3.3",
    "next-auth": "^4.24.11",
    "node-cron": "^4.0.7",
    "node-cron": "^4.1.0",
    "node-fetch": "^3.3.2",
    "picocolors": "^1.1.1",
    "react": "^19.1.0",
    "react-chartjs-2": "^5.0.0",
    "react-chartjs-2": "^5.3.0",
    "react-dom": "^19.1.0",
    "react-leaflet": "^5.0.0",
    "react-markdown": "^10.1.0",
    "rehype-raw": "^7.0.0",
    "source-map-js": "^1.2.1",
    "tailwind-merge": "^3.3.1"
    "rehype-raw": "^7.0.0"
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.27.0",
    "@eslint/js": "^9.28.0",
    "@playwright/test": "^1.52.0",
    "@tailwindcss/postcss": "^4.1.7",
    "@types/bcryptjs": "^2.4.2",
    "@types/node": "^22.15.21",
    "@types/node-cron": "^3.0.8",
    "@types/react": "^19.1.5",
    "@tailwindcss/postcss": "^4.1.8",
    "@types/bcryptjs": "^2.4.6",
    "@types/node": "^22.15.29",
    "@types/node-cron": "^3.0.11",
    "@types/react": "^19.1.6",
    "@types/react-dom": "^19.1.5",
    "@typescript-eslint/eslint-plugin": "^8.32.1",
    "@typescript-eslint/parser": "^8.32.1",
    "eslint": "^9.27.0",
    "eslint-config-next": "^15.3.2",
    "eslint-plugin-prettier": "^5.4.0",
    "@typescript-eslint/eslint-plugin": "^8.33.0",
    "@typescript-eslint/parser": "^8.33.0",
    "eslint": "^9.28.0",
    "eslint-config-next": "^15.3.3",
    "eslint-plugin-prettier": "^5.4.1",
    "markdownlint-cli2": "^0.18.1",
    "postcss": "^8.5.3",
    "postcss": "^8.5.4",
    "prettier": "^3.5.3",
    "prettier-plugin-jinja-template": "^2.1.0",
    "prisma": "^6.10.1",
    "tailwindcss": "^4.1.7",
    "prisma": "^6.8.2",
    "tailwindcss": "^4.1.8",
    "ts-node": "^10.9.2",
    "tsx": "^4.20.3",
    "tw-animate-css": "^1.3.4",
    "typescript": "^5.0.0"
    "typescript": "^5.8.3",
    "wrangler": "4.18.0"
  },
  "prettier": {
    "bracketSpacing": true,
@@ -128,5 +139,22 @@
    ".git",
    "*.json"
  ]
  },
  "cloudflare": {
    "label": "Worker + D1 Database",
    "products": [
      "Workers",
      "D1"
    ],
    "categories": [
      "storage"
    ],
    "icon_urls": [
      "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/c6fc5da3-1e0a-4608-b2f1-9628577ec800/public",
      "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/5ca0ca32-e897-4699-d4c1-6b680512f000/public"
    ],
    "docs_url": "https://developers.cloudflare.com/d1/",
    "preview_image_url": "https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/cb7cb0a9-6102-4822-633c-b76b7bb25900/public",
    "publish": true
  }
}
174
pages/api/admin/refresh-sessions.ts
Normal file
@@ -0,0 +1,174 @@
// API route to refresh (fetch+parse+update) session data for a company
import { NextApiRequest, NextApiResponse } from "next";
import { fetchAndParseCsv } from "../../../lib/csvFetcher";
import { prisma } from "../../../lib/prisma";

interface SessionCreateData {
  id: string;
  startTime: Date;
  companyId: string;
  sessionId?: string;
  [key: string]: unknown;
}

/**
 * Fetches transcript content from a URL with optional authentication
 * @param url The URL to fetch the transcript from
 * @param username Optional username for Basic Auth
 * @param password Optional password for Basic Auth
 * @returns The transcript content or null if fetching fails
 */
async function fetchTranscriptContent(
  url: string,
  username?: string,
  password?: string
): Promise<string | null> {
  try {
    const authHeader =
      username && password
        ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
        : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
    });

    if (!response.ok) {
      process.stderr.write(
        `Error fetching transcript: ${response.statusText}\n`
      );
      return null;
    }
    return await response.text();
  } catch (error) {
    process.stderr.write(`Failed to fetch transcript: ${error}\n`);
    return null;
  }
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Check if this is a POST request
  if (req.method !== "POST") {
    return res.status(405).json({ error: "Method not allowed" });
  }

  // Get companyId from body or query
  let { companyId } = req.body;

  if (!companyId) {
    // Try to get user from prisma based on session cookie
    try {
      const session = await prisma.session.findFirst({
        orderBy: { createdAt: "desc" },
        where: {
          /* Add session check criteria here */
        },
      });

      if (session) {
        companyId = session.companyId;
      }
    } catch (error) {
      // Log error for server-side debugging
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      // Use a server-side logging approach instead of console
      process.stderr.write(`Error fetching session: ${errorMessage}\n`);
    }
  }

  if (!companyId) {
    return res.status(400).json({ error: "Company ID is required" });
  }

  const company = await prisma.company.findUnique({ where: { id: companyId } });
  if (!company) return res.status(404).json({ error: "Company not found" });

  try {
    const sessions = await fetchAndParseCsv(
      company.csvUrl,
      company.csvUsername as string | undefined,
      company.csvPassword as string | undefined
    );

    // Replace all session rows for this company (for demo simplicity)
    await prisma.session.deleteMany({ where: { companyId: company.id } });

    for (const session of sessions) {
      const sessionData: SessionCreateData = {
        ...session,
        companyId: company.id,
        id:
          session.id ||
          session.sessionId ||
          `sess_${Date.now()}_${Math.random().toString(36).substring(2, 7)}`,
        // Ensure startTime is not undefined
        startTime: session.startTime || new Date(),
      };

      // Validate dates to prevent "Invalid Date" errors
      const startTime =
        sessionData.startTime instanceof Date &&
        !isNaN(sessionData.startTime.getTime())
          ? sessionData.startTime
          : new Date();

      const endTime =
        session.endTime instanceof Date && !isNaN(session.endTime.getTime())
          ? session.endTime
          : new Date();

      // Fetch transcript content if URL is available
      let transcriptContent: string | null = null;
      if (session.fullTranscriptUrl) {
        transcriptContent = await fetchTranscriptContent(
          session.fullTranscriptUrl,
          company.csvUsername as string | undefined,
          company.csvPassword as string | undefined
        );
      }

      // Only include fields that are properly typed for Prisma
      await prisma.session.create({
        data: {
          id: sessionData.id,
          companyId: sessionData.companyId,
          startTime: startTime,
          endTime: endTime,
          ipAddress: session.ipAddress || null,
          country: session.country || null,
          language: session.language || null,
          messagesSent:
            typeof session.messagesSent === "number" ? session.messagesSent : 0,
          sentiment:
            typeof session.sentiment === "number" ? session.sentiment : null,
          escalated:
            typeof session.escalated === "boolean" ? session.escalated : null,
          forwardedHr:
            typeof session.forwardedHr === "boolean"
              ? session.forwardedHr
              : null,
          fullTranscriptUrl: session.fullTranscriptUrl || null,
          transcriptContent: transcriptContent, // Add the transcript content
          avgResponseTime:
            typeof session.avgResponseTime === "number"
              ? session.avgResponseTime
              : null,
          tokens: typeof session.tokens === "number" ? session.tokens : null,
          tokensEur:
            typeof session.tokensEur === "number" ? session.tokensEur : null,
          category: session.category || null,
          initialMsg: session.initialMsg || null,
        },
      });
    }

    res.json({ ok: true, imported: sessions.length });
  } catch (e) {
    const error = e instanceof Error ? e.message : "An unknown error occurred";
    res.status(500).json({ error });
  }
}
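For manual testing, the refresh endpoint can be exercised with a plain fetch call. A sketch, where the host, port, and companyId value are assumptions:

// Hypothetical client-side trigger for the refresh endpoint.
async function refreshSessions(companyId: string) {
  const res = await fetch("http://localhost:3000/api/admin/refresh-sessions", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ companyId }),
  });
  if (!res.ok) throw new Error(`Refresh failed: ${res.status}`);
  const { imported } = await res.json();
  console.log(`Imported ${imported} sessions`);
}

refreshSessions("company-id-here").catch(console.error);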
104
pages/api/auth/[...nextauth].ts
Normal file
@@ -0,0 +1,104 @@
import NextAuth, { NextAuthOptions } from "next-auth";
import CredentialsProvider from "next-auth/providers/credentials";
import { prisma } from "../../../lib/prisma";
import bcrypt from "bcryptjs";

// Define the shape of the JWT token
declare module "next-auth/jwt" {
  interface JWT {
    companyId: string;
    role: string;
  }
}

// Define the shape of the session object
declare module "next-auth" {
  interface Session {
    user: {
      id?: string;
      name?: string;
      email?: string;
      image?: string;
      companyId: string;
      role: string;
    };
  }

  interface User {
    id: string;
    email: string;
    companyId: string;
    role: string;
  }
}

export const authOptions: NextAuthOptions = {
  providers: [
    CredentialsProvider({
      name: "Credentials",
      credentials: {
        email: { label: "Email", type: "text" },
        password: { label: "Password", type: "password" },
      },
      async authorize(credentials) {
        if (!credentials?.email || !credentials?.password) {
          return null;
        }

        const user = await prisma.user.findUnique({
          where: { email: credentials.email },
        });

        if (!user) return null;

        const valid = await bcrypt.compare(credentials.password, user.password);
        if (!valid) return null;

        return {
          id: user.id,
          email: user.email,
          companyId: user.companyId,
          role: user.role,
        };
      },
    }),
  ],
  session: {
    strategy: "jwt",
    maxAge: 30 * 24 * 60 * 60, // 30 days
  },
  cookies: {
    sessionToken: {
      name: `next-auth.session-token`,
      options: {
        httpOnly: true,
        sameSite: "lax",
        path: "/",
        secure: process.env.NODE_ENV === "production",
      },
    },
  },
  callbacks: {
    async jwt({ token, user }) {
      if (user) {
        token.companyId = user.companyId;
        token.role = user.role;
      }
      return token;
    },
    async session({ session, token }) {
      if (token && session.user) {
        session.user.companyId = token.companyId;
        session.user.role = token.role;
      }
      return session;
    },
  },
  pages: {
    signIn: "/login",
  },
  secret: process.env.NEXTAUTH_SECRET,
  debug: process.env.NODE_ENV === "development",
};

export default NextAuth(authOptions);
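Because the module augmentation above adds companyId and role to the Session type, any API route can read them in a type-safe way after getServerSession. A minimal sketch of that pattern, mirroring what the dashboard routes below do (the import path is illustrative):

import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { authOptions } from "../auth/[...nextauth]"; // path depends on where this handler lives

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user) return res.status(401).json({ error: "Not logged in" });

  // companyId and role are available thanks to the augmented Session type.
  const { companyId, role } = session.user;
  res.json({ companyId, role });
}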
36
pages/api/dashboard/config.ts
Normal file
@@ -0,0 +1,36 @@
// API endpoint: update company CSV URL config
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { authOptions } from "../auth/[...nextauth]";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user) return res.status(401).json({ error: "Not logged in" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "POST") {
    const { csvUrl } = req.body;
    await prisma.company.update({
      where: { id: user.companyId },
      data: { csvUrl },
    });
    res.json({ ok: true });
  } else if (req.method === "GET") {
    // Get company data
    const company = await prisma.company.findUnique({
      where: { id: user.companyId },
    });
    res.json({ company });
  } else {
    res.status(405).end();
  }
}
83
pages/api/dashboard/metrics.ts
Normal file
@@ -0,0 +1,83 @@
// API endpoint: return metrics for current company
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { sessionMetrics } from "../../../lib/metrics";
import { authOptions } from "../auth/[...nextauth]";
import { ChatSession } from "../../../lib/types"; // Import ChatSession

interface SessionUser {
  email: string;
  name?: string;
}

interface SessionData {
  user: SessionUser;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = (await getServerSession(
    req,
    res,
    authOptions
  )) as SessionData | null;
  if (!session?.user) return res.status(401).json({ error: "Not logged in" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email },
    include: { company: true },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  const prismaSessions = await prisma.session.findMany({
    where: { companyId: user.companyId },
  });

  // Convert Prisma sessions to ChatSession[] type for sessionMetrics
  const chatSessions: ChatSession[] = prismaSessions.map((ps) => ({
    id: ps.id, // Map Prisma's id to ChatSession.id
    sessionId: ps.id, // Map Prisma's id to ChatSession.sessionId
    companyId: ps.companyId,
    startTime: new Date(ps.startTime), // Ensure startTime is a Date object
    endTime: ps.endTime ? new Date(ps.endTime) : null, // Ensure endTime is a Date object or null
    transcriptContent: ps.transcriptContent || "", // Ensure transcriptContent is a string
    createdAt: new Date(ps.createdAt), // Map Prisma's createdAt
    updatedAt: new Date(ps.createdAt), // Use createdAt for updatedAt as Session model doesn't have updatedAt
    category: ps.category || undefined,
    language: ps.language || undefined,
    country: ps.country || undefined,
    ipAddress: ps.ipAddress || undefined,
    sentiment: ps.sentiment === null ? undefined : ps.sentiment,
    messagesSent: ps.messagesSent === null ? undefined : ps.messagesSent, // Handle null messagesSent
    avgResponseTime:
      ps.avgResponseTime === null ? undefined : ps.avgResponseTime,
    tokens: ps.tokens === null ? undefined : ps.tokens,
    tokensEur: ps.tokensEur === null ? undefined : ps.tokensEur,
    escalated: ps.escalated || false,
    forwardedHr: ps.forwardedHr || false,
    initialMsg: ps.initialMsg || undefined,
    fullTranscriptUrl: ps.fullTranscriptUrl || undefined,
    // userId is missing in Prisma Session model, assuming it's not strictly needed for metrics or can be null
    userId: undefined, // Or some other default/mapping if available
  }));

  // Pass company config to metrics
  const companyConfigForMetrics = {
    sentimentAlert:
      user.company.sentimentAlert === null
        ? undefined
        : user.company.sentimentAlert,
  };

  const metrics = sessionMetrics(chatSessions, companyConfigForMetrics);

  res.json({
    metrics,
    csvUrl: user.company.csvUrl,
    company: user.company,
  });
}
76
pages/api/dashboard/session-filter-options.ts
Normal file
@@ -0,0 +1,76 @@
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth/next";
import { authOptions } from "../auth/[...nextauth]";
import { prisma } from "../../../lib/prisma";
import { SessionFilterOptions } from "../../../lib/types";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse<
    SessionFilterOptions | { error: string; details?: string }
  >
) {
  if (req.method !== "GET") {
    return res.status(405).json({ error: "Method not allowed" });
  }

  const authSession = await getServerSession(req, res, authOptions);

  if (!authSession || !authSession.user?.companyId) {
    return res.status(401).json({ error: "Unauthorized" });
  }

  const companyId = authSession.user.companyId;

  try {
    const categories = await prisma.session.findMany({
      where: {
        companyId,
        category: {
          not: null, // Ensure category is not null
        },
      },
      distinct: ["category"],
      select: {
        category: true,
      },
      orderBy: {
        category: "asc",
      },
    });

    const languages = await prisma.session.findMany({
      where: {
        companyId,
        language: {
          not: null, // Ensure language is not null
        },
      },
      distinct: ["language"],
      select: {
        language: true,
      },
      orderBy: {
        language: "asc",
      },
    });

    const distinctCategories = categories
      .map((s) => s.category)
      .filter(Boolean) as string[]; // Filter out any nulls and assert as string[]
    const distinctLanguages = languages
      .map((s) => s.language)
      .filter(Boolean) as string[]; // Filter out any nulls and assert as string[]

    return res
      .status(200)
      .json({ categories: distinctCategories, languages: distinctLanguages });
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : "An unknown error occurred";
    return res.status(500).json({
      error: "Failed to fetch filter options",
      details: errorMessage,
    });
  }
}
68
pages/api/dashboard/session/[id].ts
Normal file
@@ -0,0 +1,68 @@
import { NextApiRequest, NextApiResponse } from "next";
import { prisma } from "../../../../lib/prisma";
import { ChatSession } from "../../../../lib/types";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method !== "GET") {
    return res.status(405).json({ error: "Method not allowed" });
  }

  const { id } = req.query;

  if (!id || typeof id !== "string") {
    return res.status(400).json({ error: "Session ID is required" });
  }

  try {
    const prismaSession = await prisma.session.findUnique({
      where: { id },
    });

    if (!prismaSession) {
      return res.status(404).json({ error: "Session not found" });
    }

    // Map Prisma session object to ChatSession type
    const session: ChatSession = {
      // Spread prismaSession to include all its properties
      ...prismaSession,
      // Override properties that need conversion or specific mapping
      id: prismaSession.id, // ChatSession.id from Prisma.Session.id
      sessionId: prismaSession.id, // ChatSession.sessionId from Prisma.Session.id
      startTime: new Date(prismaSession.startTime),
      endTime: prismaSession.endTime ? new Date(prismaSession.endTime) : null,
      createdAt: new Date(prismaSession.createdAt),
      // Prisma.Session does not have an `updatedAt` field. We'll use `createdAt` as a fallback.
      // Or, if your business logic implies an update timestamp elsewhere, use that.
      updatedAt: new Date(prismaSession.createdAt), // Fallback to createdAt
      // Prisma.Session does not have a `userId` field.
      userId: null, // Explicitly set to null or map if available from another source
      // Ensure nullable fields from Prisma are correctly mapped to ChatSession's optional or nullable fields
      category: prismaSession.category ?? null,
      language: prismaSession.language ?? null,
      country: prismaSession.country ?? null,
      ipAddress: prismaSession.ipAddress ?? null,
      sentiment: prismaSession.sentiment ?? null,
      messagesSent: prismaSession.messagesSent ?? undefined, // Use undefined if ChatSession expects number | undefined
      avgResponseTime: prismaSession.avgResponseTime ?? null,
      escalated: prismaSession.escalated ?? undefined,
      forwardedHr: prismaSession.forwardedHr ?? undefined,
      tokens: prismaSession.tokens ?? undefined,
      tokensEur: prismaSession.tokensEur ?? undefined,
      initialMsg: prismaSession.initialMsg ?? undefined,
      fullTranscriptUrl: prismaSession.fullTranscriptUrl ?? null,
      transcriptContent: prismaSession.transcriptContent ?? null,
    };

    return res.status(200).json({ session });
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : "An unknown error occurred";
    return res
      .status(500)
      .json({ error: "Failed to fetch session", details: errorMessage });
  }
}
164
pages/api/dashboard/sessions.ts
Normal file
@@ -0,0 +1,164 @@
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth/next";
import { authOptions } from "../auth/[...nextauth]";
import { prisma } from "../../../lib/prisma";
import {
  ChatSession,
  SessionApiResponse,
  SessionQuery,
} from "../../../lib/types";
import { Prisma } from "@prisma/client";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse<SessionApiResponse | { error: string; details?: string }>
) {
  if (req.method !== "GET") {
    return res.status(405).json({ error: "Method not allowed" });
  }

  const authSession = await getServerSession(req, res, authOptions);

  if (!authSession || !authSession.user?.companyId) {
    return res.status(401).json({ error: "Unauthorized" });
  }

  const companyId = authSession.user.companyId;
  const {
    searchTerm,
    category,
    language,
    startDate,
    endDate,
    sortKey,
    sortOrder,
    page: queryPage,
    pageSize: queryPageSize,
  } = req.query as SessionQuery;

  const page = Number(queryPage) || 1;
  const pageSize = Number(queryPageSize) || 10;

  try {
    const whereClause: Prisma.SessionWhereInput = { companyId };

    // Search Term
    if (
      searchTerm &&
      typeof searchTerm === "string" &&
      searchTerm.trim() !== ""
    ) {
      const searchConditions = [
        { id: { contains: searchTerm } },
        { category: { contains: searchTerm } },
        { initialMsg: { contains: searchTerm } },
        { transcriptContent: { contains: searchTerm } },
      ];
      whereClause.OR = searchConditions;
    }

    // Category Filter
    if (category && typeof category === "string" && category.trim() !== "") {
      whereClause.category = category;
    }

    // Language Filter
    if (language && typeof language === "string" && language.trim() !== "") {
      whereClause.language = language;
    }

    // Date Range Filter
    if (startDate && typeof startDate === "string") {
      whereClause.startTime = {
        ...((whereClause.startTime as object) || {}),
        gte: new Date(startDate),
      };
    }
    if (endDate && typeof endDate === "string") {
      const inclusiveEndDate = new Date(endDate);
      inclusiveEndDate.setDate(inclusiveEndDate.getDate() + 1);
      whereClause.startTime = {
        ...((whereClause.startTime as object) || {}),
        lt: inclusiveEndDate,
      };
    }

    // Sorting
    const validSortKeys: { [key: string]: string } = {
      startTime: "startTime",
      category: "category",
      language: "language",
      sentiment: "sentiment",
      messagesSent: "messagesSent",
      avgResponseTime: "avgResponseTime",
    };

    let orderByCondition:
      | Prisma.SessionOrderByWithRelationInput
      | Prisma.SessionOrderByWithRelationInput[];

    const primarySortField =
      sortKey && typeof sortKey === "string" && validSortKeys[sortKey]
        ? validSortKeys[sortKey]
        : "startTime"; // Default to startTime field if sortKey is invalid/missing

    const primarySortOrder =
      sortOrder === "asc" || sortOrder === "desc" ? sortOrder : "desc"; // Default to desc order

    if (primarySortField === "startTime") {
      // If sorting by startTime, it's the only sort criteria
      orderByCondition = { [primarySortField]: primarySortOrder };
    } else {
      // If sorting by another field, use startTime: "desc" as secondary sort
      orderByCondition = [
        { [primarySortField]: primarySortOrder },
        { startTime: "desc" },
      ];
    }
    // Note: If sortKey was initially undefined or invalid, primarySortField defaults to "startTime",
    // and primarySortOrder defaults to "desc". This makes orderByCondition = { startTime: "desc" },
    // which is the correct overall default sort.

    const prismaSessions = await prisma.session.findMany({
      where: whereClause,
      orderBy: orderByCondition,
      skip: (page - 1) * pageSize,
      take: pageSize,
    });

    const totalSessions = await prisma.session.count({ where: whereClause });

    const sessions: ChatSession[] = prismaSessions.map((ps) => ({
      id: ps.id,
      sessionId: ps.id,
      companyId: ps.companyId,
      startTime: new Date(ps.startTime),
      endTime: ps.endTime ? new Date(ps.endTime) : null,
      createdAt: new Date(ps.createdAt),
      updatedAt: new Date(ps.createdAt),
      userId: null,
      category: ps.category ?? null,
      language: ps.language ?? null,
      country: ps.country ?? null,
      ipAddress: ps.ipAddress ?? null,
      sentiment: ps.sentiment ?? null,
      messagesSent: ps.messagesSent ?? undefined,
      avgResponseTime: ps.avgResponseTime ?? null,
      escalated: ps.escalated ?? undefined,
      forwardedHr: ps.forwardedHr ?? undefined,
      tokens: ps.tokens ?? undefined,
      tokensEur: ps.tokensEur ?? undefined,
      initialMsg: ps.initialMsg ?? undefined,
      fullTranscriptUrl: ps.fullTranscriptUrl ?? null,
      transcriptContent: ps.transcriptContent ?? null,
    }));

    return res.status(200).json({ sessions, totalSessions });
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : "An unknown error occurred";
    return res
      .status(500)
      .json({ error: "Failed to fetch sessions", details: errorMessage });
  }
}
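The query parameters accepted above combine into a single GET URL. A sketch of a client call (the base path matches the route; the filter values themselves are assumptions):

// Hypothetical client helper for the sessions list endpoint.
async function fetchSessions() {
  const params = new URLSearchParams({
    searchTerm: "vacation",
    category: "Leave & Vacation",
    sortKey: "startTime",
    sortOrder: "desc",
    page: "1",
    pageSize: "10",
  });
  const res = await fetch(`/api/dashboard/sessions?${params}`);
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  const { sessions, totalSessions } = await res.json();
  console.log(`Showing ${sessions.length} of ${totalSessions} sessions`);
}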
37
pages/api/dashboard/settings.ts
Normal file
@@ -0,0 +1,37 @@
import { NextApiRequest, NextApiResponse } from "next";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import { authOptions } from "../auth/[...nextauth]";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user || session.user.role !== "admin")
    return res.status(403).json({ error: "Forbidden" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "POST") {
    const { csvUrl, csvUsername, csvPassword, sentimentThreshold } = req.body;
    await prisma.company.update({
      where: { id: user.companyId },
      data: {
        csvUrl,
        csvUsername,
        ...(csvPassword ? { csvPassword } : {}),
        sentimentAlert: sentimentThreshold
          ? parseFloat(sentimentThreshold)
          : null,
      },
    });
    res.json({ ok: true });
  } else {
    res.status(405).end();
  }
}
59
pages/api/dashboard/users.ts
Normal file
@@ -0,0 +1,59 @@
import { NextApiRequest, NextApiResponse } from "next";
import crypto from "crypto";
import { getServerSession } from "next-auth";
import { prisma } from "../../../lib/prisma";
import bcrypt from "bcryptjs";
import { authOptions } from "../auth/[...nextauth]";
// User type from prisma is used instead of the one in lib/types

interface UserBasicInfo {
  id: string;
  email: string;
  role: string;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const session = await getServerSession(req, res, authOptions);
  if (!session?.user || session.user.role !== "admin")
    return res.status(403).json({ error: "Forbidden" });

  const user = await prisma.user.findUnique({
    where: { email: session.user.email as string },
  });

  if (!user) return res.status(401).json({ error: "No user" });

  if (req.method === "GET") {
    const users = await prisma.user.findMany({
      where: { companyId: user.companyId },
    });

    const mappedUsers: UserBasicInfo[] = users.map((u) => ({
      id: u.id,
      email: u.email,
      role: u.role,
    }));

    res.json({ users: mappedUsers });
  } else if (req.method === "POST") {
    const { email, role } = req.body;
    if (!email || !role)
      return res.status(400).json({ error: "Missing fields" });
    const exists = await prisma.user.findUnique({ where: { email } });
    if (exists) return res.status(409).json({ error: "Email exists" });
    const tempPassword = crypto.randomBytes(12).toString("base64").slice(0, 12); // secure random initial password
    await prisma.user.create({
      data: {
        email,
        password: await bcrypt.hash(tempPassword, 10),
        companyId: user.companyId,
        role,
      },
    });
    // TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
    res.json({ ok: true, tempPassword });
  } else res.status(405).end();
}
31
pages/api/forgot-password.ts
Normal file
@@ -0,0 +1,31 @@
import { prisma } from "../../lib/prisma";
import { sendEmail } from "../../lib/sendEmail";
import crypto from "crypto";
import type { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]);
    return res.status(405).end(`Method ${req.method} Not Allowed`);
  }

  // Type the body with a type assertion
  const { email } = req.body as { email: string };

  const user = await prisma.user.findUnique({ where: { email } });
  if (!user) return res.status(200).end(); // always 200 for privacy

  const token = crypto.randomBytes(32).toString("hex");
  const expiry = new Date(Date.now() + 1000 * 60 * 30); // 30 min expiry
  await prisma.user.update({
    where: { email },
    data: { resetToken: token, resetTokenExpiry: expiry },
  });

  const resetUrl = `${process.env.NEXTAUTH_URL || "http://localhost:3000"}/reset-password?token=${token}`;
  await sendEmail(email, "Password Reset", `Reset your password: ${resetUrl}`);
  res.status(200).end();
}
56
pages/api/register.ts
Normal file
@@ -0,0 +1,56 @@
import { NextApiRequest, NextApiResponse } from "next";
import { prisma } from "../../lib/prisma";
import bcrypt from "bcryptjs";
import { ApiResponse } from "../../lib/types";

interface RegisterRequestBody {
  email: string;
  password: string;
  company: string;
  csvUrl?: string;
}

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse<ApiResponse<{ success: boolean } | { error: string }>>
) {
  if (req.method !== "POST") return res.status(405).end();

  const { email, password, company, csvUrl } = req.body as RegisterRequestBody;

  if (!email || !password || !company) {
    return res.status(400).json({
      success: false,
      error: "Missing required fields",
    });
  }

  // Check if email exists
  const exists = await prisma.user.findUnique({
    where: { email },
  });

  if (exists) {
    return res.status(409).json({
      success: false,
      error: "Email already exists",
    });
  }

  const newCompany = await prisma.company.create({
    data: { name: company, csvUrl: csvUrl || "" },
  });
  const hashed = await bcrypt.hash(password, 10);
  await prisma.user.create({
    data: {
      email,
      password: hashed,
      companyId: newCompany.id,
      role: "admin",
    },
  });
  res.status(201).json({
    success: true,
    data: { success: true },
  });
}
63
pages/api/reset-password.ts
Normal file
@@ -0,0 +1,63 @@
import { prisma } from "../../lib/prisma";
import bcrypt from "bcryptjs";
import type { NextApiRequest, NextApiResponse } from "next"; // Import official Next.js types

export default async function handler(
  req: NextApiRequest, // Use official NextApiRequest
  res: NextApiResponse // Use official NextApiResponse
) {
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]); // Good practice to set Allow header for 405
    return res.status(405).end(`Method ${req.method} Not Allowed`);
  }

  // It's good practice to explicitly type the expected body for clarity and safety
  const { token, password } = req.body as { token?: string; password?: string };

  if (!token || !password) {
    return res.status(400).json({ error: "Token and password are required." });
  }

  if (password.length < 8) {
    // Example: Add password complexity rule
    return res
      .status(400)
      .json({ error: "Password must be at least 8 characters long." });
  }

  try {
    const user = await prisma.user.findFirst({
      where: {
        resetToken: token,
        resetTokenExpiry: { gte: new Date() },
      },
    });

    if (!user) {
      return res.status(400).json({
        error: "Invalid or expired token. Please request a new password reset.",
      });
    }

    const hash = await bcrypt.hash(password, 10);
    await prisma.user.update({
      where: { id: user.id },
      data: {
        password: hash,
        resetToken: null,
        resetTokenExpiry: null,
      },
    });

    // Instead of just res.status(200).end(), send a success message
    return res
      .status(200)
      .json({ message: "Password has been reset successfully." });
  } catch (error) {
    console.error("Reset password error:", error); // Log the error for server-side debugging
    // Provide a generic error message to the client
    return res.status(500).json({
      error: "An internal server error occurred. Please try again later.",
    });
  }
}
6883
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
@@ -33,7 +33,7 @@ CREATE TABLE "Session" (
    "country" TEXT,
    "language" TEXT,
    "messagesSent" INTEGER,
    "sentiment" TEXT,
    "sentiment" REAL,
    "escalated" BOOLEAN,
    "forwardedHr" BOOLEAN,
    "fullTranscriptUrl" TEXT,
@@ -42,29 +42,9 @@ CREATE TABLE "Session" (
    "tokensEur" REAL,
    "category" TEXT,
    "initialMsg" TEXT,
    "processed" BOOLEAN NOT NULL DEFAULT false,
    "validData" BOOLEAN NOT NULL DEFAULT true,
    "questions" JSONB,
    "summary" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Session_companyId_fkey" FOREIGN KEY ("companyId") REFERENCES "Company" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Message" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "sessionId" TEXT NOT NULL,
    "timestamp" DATETIME NOT NULL,
    "role" TEXT NOT NULL,
    "content" TEXT NOT NULL,
    "order" INTEGER NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Message_sessionId_fkey" FOREIGN KEY ("sessionId") REFERENCES "Session" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

-- CreateIndex
CREATE INDEX "Message_sessionId_order_idx" ON "Message"("sessionId", "order");
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Session" ADD COLUMN "transcriptContent" TEXT;
@@ -1,11 +1,12 @@
// Database schema, one company = one org, linked to users and CSV config
generator client {
  provider = "prisma-client-js"
  previewFeatures = ["driverAdapters"]
}

datasource db {
  provider = "sqlite"
  url = "file:./dev.db"
  url = env("DATABASE_URL")
}

model Company {
@@ -34,41 +35,24 @@ model User {
}

model Session {
  id String @id
  company Company @relation(fields: [companyId], references: [id])
  id String @id
  company Company @relation(fields: [companyId], references: [id])
  companyId String
  startTime DateTime
  endTime DateTime?
  endTime DateTime
  ipAddress String?
  country String?
  language String?
  messagesSent Int?
  sentiment String? // "positive", "neutral", or "negative"
  sentiment Float?
  escalated Boolean?
  forwardedHr Boolean?
  fullTranscriptUrl String?
  transcriptContent String? // Added to store the fetched transcript
  avgResponseTime Float?
  tokens Int?
  tokensEur Float?
  category String?
  initialMsg String?
  processed Boolean @default(false)
  validData Boolean @default(true)
  questions Json?
  summary String?
  messages Message[]
  createdAt DateTime @default(now())
}

model Message {
  id String @id @default(uuid())
  session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
  sessionId String
  timestamp DateTime // When the message was sent
  role String // "User", "Assistant", "System", etc.
  content String // The message content
  order Int // Order within the conversation (0, 1, 2, ...)
  createdAt DateTime @default(now())

  @@index([sessionId, order]) // Index for efficient ordering queries
  createdAt DateTime @default(now())
}
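With sentiment now stored as a Float, threshold checks become plain numeric comparisons. A sketch of a Prisma query against the new column (the -0.3 threshold is an assumption, not a value from the repo):

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Hypothetical alerting query: sessions whose sentiment score falls
// below a configurable threshold (cf. Company.sentimentAlert).
async function findNegativeSessions(threshold = -0.3) {
  return prisma.session.findMany({
    where: {
      sentiment: { lt: threshold },
      validData: true, // exclude rows flagged as bad data
    },
    orderBy: { sentiment: "asc" },
    take: 20,
  });
}

findNegativeSessions().then((s) => console.log(`${s.length} sessions flagged`));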
39
prisma/seed.mjs
Normal file
@@ -0,0 +1,39 @@
// Seed script for creating initial data
import { PrismaClient } from "@prisma/client";
import bcrypt from "bcryptjs";

const prisma = new PrismaClient();

async function main() {
  try {
    // Create a company
    const company = await prisma.company.create({
      data: {
        name: "Demo Company",
        csvUrl: "https://example.com/data.csv", // Replace with a real URL if available
      },
    });

    // Create an admin user
    const hashedPassword = await bcrypt.hash("admin123", 10);
    await prisma.user.create({
      data: {
        email: "admin@demo.com",
        password: hashedPassword,
        role: "admin",
        companyId: company.id,
      },
    });

    console.log("Seed data created successfully:");
    console.log("Company: Demo Company");
    console.log("Admin user: admin@demo.com (password: admin123)");
  } catch (error) {
    console.error("Error seeding database:", error);
    process.exit(1);
  } finally {
    await prisma.$disconnect();
  }
}

main();
@@ -1,64 +0,0 @@
|
||||
// Check current database status
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function checkDatabaseStatus() {
|
||||
try {
|
||||
console.log('📊 Checking database status...\n');
|
||||
|
||||
// Count total sessions
|
||||
const totalSessions = await prisma.session.count();
|
||||
console.log(`📈 Total sessions: ${totalSessions}`);
|
||||
|
||||
// Count processed vs unprocessed
|
||||
const processedSessions = await prisma.session.count({
|
||||
where: { processed: true }
|
||||
});
|
||||
const unprocessedSessions = await prisma.session.count({
|
||||
where: { processed: false }
|
||||
});
|
||||
|
||||
console.log(`✅ Processed sessions: ${processedSessions}`);
|
||||
console.log(`⏳ Unprocessed sessions: ${unprocessedSessions}`);
|
||||
|
||||
// Count valid vs invalid data
|
||||
const validSessions = await prisma.session.count({
|
||||
where: { validData: true }
|
||||
});
|
||||
const invalidSessions = await prisma.session.count({
|
||||
where: { validData: false }
|
||||
});
|
||||
|
||||
console.log(`🎯 Valid data sessions: ${validSessions}`);
|
||||
console.log(`❌ Invalid data sessions: ${invalidSessions}`);
|
||||
|
||||
// Count sessions with messages
|
||||
const sessionsWithMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: {
|
||||
some: {}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`💬 Sessions with messages: ${sessionsWithMessages}`);
|
||||
|
||||
// Count companies
|
||||
const totalCompanies = await prisma.company.count();
|
||||
console.log(`🏢 Total companies: ${totalCompanies}`);
|
||||
|
||||
if (totalSessions === 0) {
|
||||
console.log('\n💡 No sessions found. Run CSV refresh to import data:');
|
||||
console.log(' curl -X POST http://localhost:3000/api/admin/refresh-sessions');
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error checking database status:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
// Run the script
|
||||
checkDatabaseStatus();
|
||||
@@ -1,69 +0,0 @@
|
||||
// Check why questions aren't being extracted properly
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function checkQuestionsIssue() {
|
||||
console.log('🔍 INVESTIGATING QUESTIONS EXTRACTION ISSUE\n');
|
||||
|
||||
// Find a session with questions stored
|
||||
const sessionWithQuestions = await prisma.session.findFirst({
|
||||
where: {
|
||||
processed: true,
|
||||
questions: { not: null }
|
||||
},
|
||||
include: { messages: true }
|
||||
});
|
||||
|
||||
if (sessionWithQuestions) {
|
||||
console.log('📋 SAMPLE SESSION WITH QUESTIONS:');
|
||||
console.log('Session ID:', sessionWithQuestions.id);
|
||||
console.log('Questions stored:', sessionWithQuestions.questions);
|
||||
console.log('Summary:', sessionWithQuestions.summary);
|
||||
console.log('Messages count:', sessionWithQuestions.messages.length);
|
||||
|
||||
console.log('\n💬 FIRST FEW MESSAGES:');
|
||||
sessionWithQuestions.messages.slice(0, 8).forEach((msg, i) => {
|
||||
console.log(` ${i+1}. [${msg.role}]: ${msg.content.substring(0, 150)}...`);
|
||||
});
|
||||
}
|
||||
|
||||
// Check sessions marked as invalid data
|
||||
const invalidSessions = await prisma.session.count({
|
||||
where: {
|
||||
processed: true,
|
||||
questions: '[]' // Empty questions array
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`\n⚠️ SESSIONS WITH EMPTY QUESTIONS: ${invalidSessions}`);
|
||||
|
||||
// Find a session with empty questions to analyze
|
||||
const emptyQuestionSession = await prisma.session.findFirst({
|
||||
where: {
|
||||
processed: true,
|
||||
questions: '[]'
|
||||
},
|
||||
include: { messages: true }
|
||||
});
|
||||
|
||||
if (emptyQuestionSession) {
|
||||
console.log('\n❌ SAMPLE SESSION WITH EMPTY QUESTIONS:');
|
||||
console.log('Session ID:', emptyQuestionSession.id);
|
||||
console.log('Questions stored:', emptyQuestionSession.questions);
|
||||
console.log('Summary:', emptyQuestionSession.summary);
|
||||
console.log('Messages count:', emptyQuestionSession.messages.length);
|
||||
|
||||
console.log('\n💬 MESSAGES FROM EMPTY QUESTION SESSION:');
|
||||
emptyQuestionSession.messages.slice(0, 8).forEach((msg, i) => {
|
||||
console.log(` ${i+1}. [${msg.role}]: ${msg.content.substring(0, 150)}...`);
|
||||
});
|
||||
}
|
||||
|
||||
console.log('\n🤖 CURRENT OPENAI MODEL: gpt-4-turbo');
|
||||
console.log('🎯 PROMPT INSTRUCTION: "Max 5 user questions in English"');
|
||||
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
|
||||
checkQuestionsIssue();
|
||||
@@ -1,76 +0,0 @@
|
||||
// Script to check what's in the transcript files
|
||||
// Usage: node scripts/check-transcript-content.js
|
||||
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function checkTranscriptContent() {
|
||||
try {
|
||||
// Get a few sessions without messages
|
||||
const sessions = await prisma.session.findMany({
|
||||
where: {
|
||||
AND: [{ fullTranscriptUrl: { not: null } }, { messages: { none: {} } }],
|
||||
},
|
||||
include: { company: true },
|
||||
take: 3,
|
||||
});
|
||||
|
||||
for (const session of sessions) {
|
||||
console.log(`\n📄 Checking session ${session.id}:`);
|
||||
console.log(` URL: ${session.fullTranscriptUrl}`);
|
||||
|
||||
try {
|
||||
const authHeader =
|
||||
session.company.csvUsername && session.company.csvPassword
|
||||
? "Basic " +
|
||||
Buffer.from(
|
||||
`${session.company.csvUsername}:${session.company.csvPassword}`
|
||||
).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const response = await fetch(session.fullTranscriptUrl, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.log(` ❌ HTTP ${response.status}: ${response.statusText}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const content = await response.text();
|
||||
console.log(` 📏 Content length: ${content.length} characters`);
|
||||
|
||||
if (content.length === 0) {
|
||||
console.log(` ⚠️ Empty file`);
|
||||
} else if (content.length < 100) {
|
||||
console.log(` 📝 Full content: "${content}"`);
|
||||
} else {
|
||||
console.log(
|
||||
` 📝 First 200 chars: "${content.substring(0, 200)}..."`
|
||||
);
|
||||
}
|
||||
|
||||
// Check if it matches our expected format
|
||||
const lines = content.split("\n").filter((line) => line.trim());
|
||||
const formatMatches = lines.filter((line) =>
|
||||
line.match(/^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/)
|
||||
);
|
||||
|
||||
console.log(
|
||||
` 🔍 Lines total: ${lines.length}, Format matches: ${formatMatches.length}`
|
||||
);
|
||||
} catch (error) {
|
||||
console.log(` ❌ Error: ${error.message}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("❌ Error:", error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
checkTranscriptContent();
|
||||
@@ -1,34 +0,0 @@
|
||||
// Check sessions for transcript URLs
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function checkTranscriptUrls() {
|
||||
const sessions = await prisma.session.findMany({
|
||||
where: {
|
||||
messages: { none: {} },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
fullTranscriptUrl: true,
|
||||
}
|
||||
});
|
||||
|
||||
const withUrl = sessions.filter(s => s.fullTranscriptUrl);
|
||||
const withoutUrl = sessions.filter(s => !s.fullTranscriptUrl);
|
||||
|
||||
console.log(`\n📊 Transcript URL Status for Sessions without Messages:`);
|
||||
console.log(`✅ Sessions with transcript URL: ${withUrl.length}`);
|
||||
console.log(`❌ Sessions without transcript URL: ${withoutUrl.length}`);
|
||||
|
||||
if (withUrl.length > 0) {
|
||||
console.log(`\n🔍 Sample URLs:`);
|
||||
withUrl.slice(0, 3).forEach(s => {
|
||||
console.log(` ${s.id}: ${s.fullTranscriptUrl}`);
|
||||
});
|
||||
}
|
||||
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
|
||||
checkTranscriptUrls();
|
||||
@@ -1,144 +0,0 @@
|
||||
// Complete processing workflow - Fetches transcripts AND processes everything
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { processUnprocessedSessions } from '../lib/processingScheduler.ts';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function completeProcessingWorkflow() {
|
||||
try {
|
||||
console.log('🚀 COMPLETE PROCESSING WORKFLOW STARTED\n');
|
||||
|
||||
// Step 1: Check initial status
|
||||
console.log('📊 STEP 1: Initial Status Check');
|
||||
console.log('=' .repeat(50));
|
||||
await checkStatus();
|
||||
|
||||
// Step 2: Fetch missing transcripts
|
||||
console.log('\n📥 STEP 2: Fetching Missing Transcripts');
|
||||
console.log('=' .repeat(50));
|
||||
|
||||
const sessionsWithoutMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: { none: {} },
|
||||
fullTranscriptUrl: { not: null }
|
||||
}
|
||||
});
|
||||
|
||||
if (sessionsWithoutMessages > 0) {
|
||||
console.log(`🔍 Found ${sessionsWithoutMessages} sessions needing transcript fetch`);
|
||||
console.log('📥 Fetching transcripts...\n');
|
||||
|
||||
try {
|
||||
const { stdout } = await execAsync('node scripts/fetch-and-parse-transcripts.js');
|
||||
console.log(stdout);
|
||||
} catch (error) {
|
||||
console.error('❌ Error fetching transcripts:', error);
|
||||
}
|
||||
} else {
|
||||
console.log('✅ All sessions with transcript URLs already have messages');
|
||||
}
|
||||
|
||||
// Step 3: Process ALL unprocessed sessions
|
||||
console.log('\n🤖 STEP 3: AI Processing (Complete Batch Processing)');
|
||||
console.log('=' .repeat(50));
|
||||
|
||||
const unprocessedWithMessages = await prisma.session.count({
|
||||
where: {
|
||||
processed: false,
|
||||
messages: { some: {} }
|
||||
}
|
||||
});
|
||||
|
||||
if (unprocessedWithMessages > 0) {
|
||||
console.log(`🔄 Found ${unprocessedWithMessages} unprocessed sessions with messages`);
|
||||
console.log('🤖 Starting complete batch processing...\n');
|
||||
|
||||
const result = await processUnprocessedSessions(10, 3);
|
||||
|
||||
console.log('\n🎉 AI Processing Results:');
|
||||
console.log(` ✅ Successfully processed: ${result.totalProcessed}`);
|
||||
console.log(` ❌ Failed to process: ${result.totalFailed}`);
|
||||
console.log(` ⏱️ Total time: ${result.totalTime.toFixed(2)}s`);
|
||||
} else {
|
||||
console.log('✅ No unprocessed sessions with messages found');
|
||||
}
|
||||
|
||||
// Step 4: Continue fetching more transcripts if available
|
||||
console.log('\n🔄 STEP 4: Checking for More Transcripts');
|
||||
console.log('=' .repeat(50));
|
||||
|
||||
const remainingWithoutMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: { none: {} },
|
||||
fullTranscriptUrl: { not: null }
|
||||
}
|
||||
});
|
||||
|
||||
if (remainingWithoutMessages > 0) {
|
||||
console.log(`🔍 Found ${remainingWithoutMessages} more sessions needing transcripts`);
|
||||
console.log('📥 Fetching additional transcripts...\n');
|
||||
|
||||
try {
|
||||
const { stdout } = await execAsync('node scripts/fetch-and-parse-transcripts.js');
|
||||
console.log(stdout);
|
||||
|
||||
// Process the newly fetched sessions
|
||||
const newUnprocessed = await prisma.session.count({
|
||||
where: {
|
||||
processed: false,
|
||||
messages: { some: {} }
|
||||
}
|
||||
});
|
||||
|
||||
if (newUnprocessed > 0) {
|
||||
console.log(`\n🤖 Processing ${newUnprocessed} newly fetched sessions...\n`);
|
||||
const result = await processUnprocessedSessions(10, 3);
|
||||
console.log(`✅ Additional processing: ${result.totalProcessed} processed, ${result.totalFailed} failed`);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error fetching additional transcripts:', error);
|
||||
}
|
||||
} else {
|
||||
console.log('✅ No more sessions need transcript fetching');
|
||||
}
|
||||
|
||||
// Step 5: Final status
|
||||
console.log('\n📊 STEP 5: Final Status');
|
||||
console.log('=' .repeat(50));
|
||||
await checkStatus();
|
||||
|
||||
console.log('\n🎯 WORKFLOW COMPLETE!');
|
||||
console.log('✅ All available sessions have been processed');
|
||||
console.log('✅ System ready for new data');
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error in complete workflow:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
async function checkStatus() {
|
||||
const totalSessions = await prisma.session.count();
|
||||
const processedSessions = await prisma.session.count({ where: { processed: true } });
|
||||
const unprocessedSessions = await prisma.session.count({ where: { processed: false } });
|
||||
const sessionsWithMessages = await prisma.session.count({
|
||||
where: { messages: { some: {} } }
|
||||
});
|
||||
const sessionsWithoutMessages = await prisma.session.count({
|
||||
where: { messages: { none: {} } }
|
||||
});
|
||||
|
||||
console.log(`📈 Total sessions: ${totalSessions}`);
|
||||
console.log(`✅ Processed sessions: ${processedSessions}`);
|
||||
console.log(`⏳ Unprocessed sessions: ${unprocessedSessions}`);
|
||||
console.log(`💬 Sessions with messages: ${sessionsWithMessages}`);
|
||||
console.log(`📄 Sessions without messages: ${sessionsWithoutMessages}`);
|
||||
}
|
||||
|
||||
// Run the complete workflow
|
||||
completeProcessingWorkflow();
|
||||
@@ -1,99 +0,0 @@
|
||||
// Complete workflow demonstration - Shows the full automated processing system
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { processUnprocessedSessions } from '../lib/processingScheduler.ts';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function demonstrateCompleteWorkflow() {
|
||||
try {
|
||||
console.log('🚀 COMPLETE AUTOMATED WORKFLOW DEMONSTRATION\n');
|
||||
|
||||
// Step 1: Check initial status
|
||||
console.log('📊 STEP 1: Initial Database Status');
|
||||
console.log('=' .repeat(50));
|
||||
await checkDatabaseStatus();
|
||||
|
||||
// Step 2: Fetch any missing transcripts
|
||||
console.log('\n📥 STEP 2: Fetching Missing Transcripts');
|
||||
console.log('=' .repeat(50));
|
||||
|
||||
const sessionsWithoutMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: { none: {} },
|
||||
fullTranscriptUrl: { not: null }
|
||||
}
|
||||
});
|
||||
|
||||
if (sessionsWithoutMessages > 0) {
|
||||
console.log(`Found ${sessionsWithoutMessages} sessions without messages but with transcript URLs`);
|
||||
console.log('💡 Run: node scripts/fetch-and-parse-transcripts.js');
|
||||
} else {
|
||||
console.log('✅ All sessions with transcript URLs already have messages');
|
||||
}
|
||||
|
||||
// Step 3: Process all unprocessed sessions
|
||||
console.log('\n🤖 STEP 3: Complete AI Processing (All Unprocessed Sessions)');
|
||||
console.log('=' .repeat(50));
|
||||
|
||||
const unprocessedCount = await prisma.session.count({
|
||||
where: {
|
||||
processed: false,
|
||||
messages: { some: {} }
|
||||
}
|
||||
});
|
||||
|
||||
if (unprocessedCount > 0) {
|
||||
console.log(`Found ${unprocessedCount} unprocessed sessions with messages`);
|
||||
console.log('🔄 Starting complete batch processing...\n');
|
||||
|
||||
const result = await processUnprocessedSessions(10, 3);
|
||||
|
||||
console.log('\n🎉 Processing Results:');
|
||||
console.log(` ✅ Successfully processed: ${result.totalProcessed}`);
|
||||
console.log(` ❌ Failed to process: ${result.totalFailed}`);
|
||||
console.log(` ⏱️ Total time: ${result.totalTime.toFixed(2)}s`);
|
||||
} else {
|
||||
console.log('✅ No unprocessed sessions found - all caught up!');
|
||||
}
|
||||
|
||||
// Step 4: Final status
|
||||
console.log('\n📊 STEP 4: Final Database Status');
|
||||
console.log('=' .repeat(50));
|
||||
await checkDatabaseStatus();
|
||||
|
||||
// Step 5: System summary
|
||||
console.log('\n🎯 STEP 5: Automated System Summary');
|
||||
console.log('=' .repeat(50));
|
||||
console.log('✅ HOURLY SCHEDULER: Processes new unprocessed sessions automatically');
|
||||
console.log('✅ DASHBOARD REFRESH: Triggers processing when refresh button is pressed');
|
||||
console.log('✅ BATCH PROCESSING: Processes ALL unprocessed sessions until completion');
|
||||
console.log('✅ QUALITY VALIDATION: Filters out low-quality sessions automatically');
|
||||
console.log('✅ COMPLETE AUTOMATION: No manual intervention needed for ongoing operations');
|
||||
|
||||
console.log('\n🚀 SYSTEM READY FOR PRODUCTION!');
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error in workflow demonstration:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
async function checkDatabaseStatus() {
|
||||
const totalSessions = await prisma.session.count();
|
||||
const processedSessions = await prisma.session.count({ where: { processed: true } });
|
||||
const unprocessedSessions = await prisma.session.count({ where: { processed: false } });
|
||||
const sessionsWithMessages = await prisma.session.count({
|
||||
where: { messages: { some: {} } }
|
||||
});
|
||||
const companies = await prisma.company.count();
|
||||
|
||||
console.log(`📈 Total sessions: ${totalSessions}`);
|
||||
console.log(`✅ Processed sessions: ${processedSessions}`);
|
||||
console.log(`⏳ Unprocessed sessions: ${unprocessedSessions}`);
|
||||
console.log(`💬 Sessions with messages: ${sessionsWithMessages}`);
|
||||
console.log(`🏢 Total companies: ${companies}`);
|
||||
}
|
||||
|
||||
// Run the demonstration
|
||||
demonstrateCompleteWorkflow();
|
||||
@@ -1,53 +0,0 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import bcrypt from 'bcryptjs';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
async function createAdminUser() {
|
||||
try {
|
||||
// Check if user exists
|
||||
const existingUser = await prisma.user.findUnique({
|
||||
where: { email: 'admin@example.com' }
|
||||
});
|
||||
|
||||
if (existingUser) {
|
||||
console.log('✅ User already exists:', existingUser.email);
|
||||
console.log('Password hash:', existingUser.password);
|
||||
return;
|
||||
}
|
||||
|
||||
// First, ensure we have a company
|
||||
let company = await prisma.company.findFirst();
|
||||
if (!company) {
|
||||
company = await prisma.company.create({
|
||||
data: {
|
||||
name: 'Demo Company',
|
||||
csvUrl: 'https://example.com/demo.csv',
|
||||
}
|
||||
});
|
||||
console.log('✅ Created demo company:', company.name);
|
||||
}
|
||||
|
||||
// Create user
|
||||
const hashedPassword = await bcrypt.hash('admin123', 10);
|
||||
const user = await prisma.user.create({
|
||||
data: {
|
||||
email: 'admin@example.com',
|
||||
password: hashedPassword,
|
||||
role: 'admin',
|
||||
companyId: company.id,
|
||||
}
|
||||
});
|
||||
|
||||
console.log('✅ User created successfully:', user.email);
|
||||
console.log('Password hash:', user.password);
|
||||
console.log('Role:', user.role);
|
||||
console.log('Company:', company.name);
|
||||
} catch (error) {
|
||||
console.error('❌ Error creating user:', error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
createAdminUser();
|
||||
184
scripts/d1-manager.js
Normal file
184
scripts/d1-manager.js
Normal file
@@ -0,0 +1,184 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Comprehensive D1 Database Management Script
|
||||
*
|
||||
* Usage Examples:
|
||||
* node scripts/d1-manager.js tables
|
||||
* node scripts/d1-manager.js schema Company
|
||||
* node scripts/d1-manager.js count User
|
||||
* node scripts/d1-manager.js query "SELECT * FROM User LIMIT 5"
|
||||
* node scripts/d1-manager.js backup
|
||||
* node scripts/d1-manager.js --remote query "SELECT COUNT(*) FROM Session"
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import { writeFileSync, mkdirSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
const DB_NAME = 'd1-notso-livedash';
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
// Parse flags
|
||||
const isRemote = args.includes('--remote');
|
||||
const filteredArgs = args.filter(arg => !arg.startsWith('--'));
|
||||
|
||||
if (filteredArgs.length === 0) {
|
||||
showHelp();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const command = filteredArgs[ 0 ];
|
||||
const params = filteredArgs.slice(1);
|
||||
|
||||
function showHelp() {
|
||||
console.log(`
|
||||
🗄️ D1 Database Manager for ${DB_NAME}
|
||||
|
||||
Usage: node scripts/d1-manager.js [--remote] <command> [params...]
|
||||
|
||||
Commands:
|
||||
info Show database information
|
||||
tables List all tables
|
||||
schema <table> Show table schema
|
||||
count <table> Count rows in table
|
||||
query "<sql>" Execute custom SQL query
|
||||
backup [filename] Export database to SQL file
|
||||
backup-schema Export just the schema
|
||||
recent-logs Show recent query activity
|
||||
|
||||
Flags:
|
||||
--remote Execute against remote D1 (production)
|
||||
|
||||
Examples:
|
||||
node scripts/d1-manager.js tables
|
||||
node scripts/d1-manager.js schema User
|
||||
node scripts/d1-manager.js count Company
|
||||
node scripts/d1-manager.js query "SELECT * FROM User WHERE role = 'admin'"
|
||||
node scripts/d1-manager.js backup
|
||||
node scripts/d1-manager.js --remote info
|
||||
`);
|
||||
}
|
||||
|
||||
function execute(sql, silent = false) {
|
||||
const remoteFlag = isRemote ? '--remote' : '';
|
||||
const cmd = `npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${sql}"`;
|
||||
|
||||
if (!silent) {
|
||||
console.log(`🔍 Executing${isRemote ? ' (remote)' : ' (local)'}: ${sql}\\n`);
|
||||
}
|
||||
|
||||
try {
|
||||
return execSync(cmd, { encoding: 'utf8' });
|
||||
} catch (error) {
|
||||
console.error('❌ Query failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function wranglerCommand(subcommand, silent = false) {
|
||||
const remoteFlag = isRemote ? '--remote' : '';
|
||||
const cmd = `npx wrangler d1 ${subcommand} ${DB_NAME} ${remoteFlag}`;
|
||||
|
||||
if (!silent) {
|
||||
console.log(`📊 Running: ${cmd}\\n`);
|
||||
}
|
||||
|
||||
try {
|
||||
return execSync(cmd, { stdio: 'inherit' });
|
||||
} catch (error) {
|
||||
console.error('❌ Command failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
switch (command) {
|
||||
case 'info':
|
||||
wranglerCommand('info');
|
||||
break;
|
||||
|
||||
case 'tables':
|
||||
console.log('📋 Listing all tables:\\n');
|
||||
execute("SELECT name, type FROM sqlite_master WHERE type IN ('table', 'view') AND name NOT LIKE 'sqlite_%' ORDER BY name;");
|
||||
break;
|
||||
|
||||
case 'schema':
|
||||
if (!params[ 0 ]) {
|
||||
console.error('❌ Please specify a table name');
|
||||
console.log('Usage: node scripts/d1-manager.js schema <table_name>');
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`🏗️ Schema for table '${params[ 0 ]}':\\n`);
|
||||
execute(`PRAGMA table_info(${params[ 0 ]});`);
|
||||
break;
|
||||
|
||||
case 'count':
|
||||
if (!params[ 0 ]) {
|
||||
console.error('❌ Please specify a table name');
|
||||
console.log('Usage: node scripts/d1-manager.js count <table_name>');
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`🔢 Row count for table '${params[ 0 ]}':\\n`);
|
||||
execute(`SELECT COUNT(*) as row_count FROM ${params[ 0 ]};`);
|
||||
break;
|
||||
|
||||
case 'query':
|
||||
if (!params[ 0 ]) {
|
||||
console.error('❌ Please specify a SQL query');
|
||||
console.log('Usage: node scripts/d1-manager.js query "SELECT * FROM table"');
|
||||
process.exit(1);
|
||||
}
|
||||
execute(params[ 0 ]);
|
||||
break;
|
||||
|
||||
case 'backup':
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
|
||||
const filename = params[ 0 ] || `backup_${timestamp}.sql`;
|
||||
|
||||
try {
|
||||
mkdirSync('backups', { recursive: true });
|
||||
} catch (e) {
|
||||
// Directory might already exist
|
||||
}
|
||||
|
||||
const backupPath = join('backups', filename);
|
||||
console.log(`💾 Creating backup: ${backupPath}\\n`);
|
||||
wranglerCommand(`export --output ${backupPath}`);
|
||||
console.log(`\\n✅ Backup created successfully: ${backupPath}`);
|
||||
break;
|
||||
|
||||
case 'backup-schema':
|
||||
try {
|
||||
mkdirSync('backups', { recursive: true });
|
||||
} catch (e) {
|
||||
// Directory might already exist
|
||||
}
|
||||
|
||||
console.log('📜 Exporting schema only...\\n');
|
||||
wranglerCommand('export --no-data --output backups/schema.sql');
|
||||
console.log('\\n✅ Schema exported to backups/schema.sql');
|
||||
break;
|
||||
|
||||
case 'recent-logs':
|
||||
console.log('📊 Recent database activity:\\n');
|
||||
try {
|
||||
wranglerCommand('insights');
|
||||
} catch (error) {
|
||||
console.log('ℹ️ Insights not available for this database');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'all-tables-info':
|
||||
console.log('📊 Information about all tables:\\n');
|
||||
const tables = [ 'Company', 'User', 'Session' ];
|
||||
for (const table of tables) {
|
||||
console.log(`\\n🏷️ Table: ${table}`);
|
||||
console.log('─'.repeat(50));
|
||||
execute(`SELECT COUNT(*) as row_count FROM ${table};`);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
console.error(`❌ Unknown command: ${command}`);
|
||||
showHelp();
|
||||
process.exit(1);
|
||||
}
|
||||
36
scripts/d1-query.js
Normal file
36
scripts/d1-query.js
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Simple D1 query helper script
|
||||
* Usage: node scripts/d1-query.js "SELECT * FROM User LIMIT 5"
|
||||
* Usage: node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company"
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.log('Usage: node scripts/d1-query.js [--remote] "SQL_QUERY"');
|
||||
console.log('Examples:');
|
||||
console.log(' node scripts/d1-query.js "SELECT * FROM User LIMIT 5"');
|
||||
console.log(' node scripts/d1-query.js --remote "SELECT COUNT(*) FROM Company"');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const isRemote = args.includes('--remote');
|
||||
const query = args[ args.length - 1 ];
|
||||
|
||||
if (!query || query.startsWith('--')) {
|
||||
console.error('Error: Please provide a SQL query');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const remoteFlag = isRemote ? '--remote' : '';
|
||||
const command = `npx wrangler d1 execute d1-notso-livedash ${remoteFlag} --command "${query}"`;
|
||||
|
||||
try {
|
||||
console.log(`🔍 Executing${isRemote ? ' (remote)' : ' (local)'}: ${query}\n`);
|
||||
execSync(command, { stdio: 'inherit' });
|
||||
} catch (error) {
|
||||
console.error('Query failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
89
scripts/d1.js
Normal file
89
scripts/d1.js
Normal file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Simple D1 Database CLI
|
||||
* Usage: node scripts/d1.js <command> [args...]
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
const DB_NAME = 'd1-notso-livedash';
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.log(`
|
||||
🗄️ Simple D1 CLI for ${DB_NAME}
|
||||
|
||||
Usage: node scripts/d1.js <command> [args...]
|
||||
|
||||
Commands:
|
||||
list List databases
|
||||
info Show database info
|
||||
tables List all tables
|
||||
schema <table> Show table schema
|
||||
query "<sql>" Execute SQL query
|
||||
export [file] Export database
|
||||
|
||||
Add --remote flag for production database
|
||||
|
||||
Examples:
|
||||
node scripts/d1.js tables
|
||||
node scripts/d1.js schema User
|
||||
node scripts/d1.js query "SELECT COUNT(*) FROM Company"
|
||||
node scripts/d1.js --remote info
|
||||
`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const isRemote = args.includes('--remote');
|
||||
const filteredArgs = args.filter(arg => !arg.startsWith('--'));
|
||||
const [ command, ...params ] = filteredArgs;
|
||||
const remoteFlag = isRemote ? '--remote' : '';
|
||||
|
||||
function run(cmd) {
|
||||
try {
|
||||
console.log(`💫 ${cmd}`);
|
||||
execSync(cmd, { stdio: 'inherit' });
|
||||
} catch (error) {
|
||||
console.error('❌ Command failed');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
switch (command) {
|
||||
case 'list':
|
||||
run('npx wrangler d1 list');
|
||||
break;
|
||||
|
||||
case 'info':
|
||||
run(`npx wrangler d1 info ${DB_NAME} ${remoteFlag}`);
|
||||
break;
|
||||
|
||||
case 'tables':
|
||||
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"`);
|
||||
break;
|
||||
|
||||
case 'schema':
|
||||
if (!params[ 0 ]) {
|
||||
console.error('❌ Please specify table name');
|
||||
process.exit(1);
|
||||
}
|
||||
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "PRAGMA table_info(${params[ 0 ]})"`);
|
||||
break;
|
||||
|
||||
case 'query':
|
||||
if (!params[ 0 ]) {
|
||||
console.error('❌ Please specify SQL query');
|
||||
process.exit(1);
|
||||
}
|
||||
run(`npx wrangler d1 execute ${DB_NAME} ${remoteFlag} --command "${params[ 0 ]}"`);
|
||||
break;
|
||||
|
||||
case 'export':
|
||||
const filename = params[ 0 ] || `backup_${new Date().toISOString().slice(0, 10)}.sql`;
|
||||
run(`npx wrangler d1 export ${DB_NAME} ${remoteFlag} --output ${filename}`);
|
||||
break;
|
||||
|
||||
default:
|
||||
console.error(`❌ Unknown command: ${command}`);
|
||||
process.exit(1);
|
||||
}
|
||||
@@ -1,197 +0,0 @@
|
||||
// Script to fetch transcripts and parse them into messages
|
||||
// Usage: node scripts/fetch-and-parse-transcripts.js
|
||||
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
|
||||
* Fetches transcript content from a URL
|
||||
*/
|
||||
async function fetchTranscriptContent(url, username, password) {
|
||||
try {
|
||||
const authHeader =
|
||||
username && password
|
||||
? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.log(
|
||||
`❌ Failed to fetch ${url}: ${response.status} ${response.statusText}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
return await response.text();
|
||||
} catch (error) {
|
||||
console.log(`❌ Error fetching ${url}: ${error.message}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses transcript content into messages
|
||||
*/
|
||||
function parseTranscriptToMessages(transcript, sessionId) {
|
||||
if (!transcript || transcript.trim() === "") {
|
||||
return [];
|
||||
}
|
||||
|
||||
const lines = transcript.split("\n").filter((line) => line.trim());
|
||||
const messages = [];
|
||||
let messageOrder = 0;
|
||||
let currentTimestamp = new Date();
|
||||
|
||||
for (const line of lines) {
|
||||
// Try format 1: [DD-MM-YYYY HH:MM:SS] Role: Content
|
||||
const timestampMatch = line.match(/^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/);
|
||||
|
||||
if (timestampMatch) {
|
||||
const [, timestamp, role, content] = timestampMatch;
|
||||
|
||||
// Parse timestamp (DD-MM-YYYY HH:MM:SS)
|
||||
const dateMatch = timestamp.match(
|
||||
/^(\d{1,2})-(\d{1,2})-(\d{4}) (\d{1,2}):(\d{1,2}):(\d{1,2})$/
|
||||
);
|
||||
let parsedTimestamp = new Date();
|
||||
|
||||
if (dateMatch) {
|
||||
const [, day, month, year, hour, minute, second] = dateMatch;
|
||||
parsedTimestamp = new Date(
|
||||
parseInt(year),
|
||||
parseInt(month) - 1, // Month is 0-indexed
|
||||
parseInt(day),
|
||||
parseInt(hour),
|
||||
parseInt(minute),
|
||||
parseInt(second)
|
||||
);
|
||||
}
|
||||
|
||||
messages.push({
|
||||
sessionId,
|
||||
role: role.trim().toLowerCase(),
|
||||
content: content.trim(),
|
||||
timestamp: parsedTimestamp,
|
||||
order: messageOrder++,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Try format 2: Role: Content (simple format)
|
||||
const simpleMatch = line.match(/^([^:]+):\s*(.+)$/);
|
||||
|
||||
if (simpleMatch) {
|
||||
const [, role, content] = simpleMatch;
|
||||
|
||||
// Use incremental timestamps (add 1 minute per message)
|
||||
currentTimestamp = new Date(currentTimestamp.getTime() + 60000);
|
||||
|
||||
messages.push({
|
||||
sessionId,
|
||||
role: role.trim().toLowerCase(),
|
||||
content: content.trim(),
|
||||
timestamp: new Date(currentTimestamp),
|
||||
order: messageOrder++,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process sessions without messages
|
||||
*/
|
||||
async function fetchAndParseTranscripts() {
|
||||
try {
|
||||
console.log("🔍 Finding sessions without messages...\n");
|
||||
|
||||
// Get sessions that have fullTranscriptUrl but no messages
|
||||
const sessionsWithoutMessages = await prisma.session.findMany({
|
||||
where: {
|
||||
AND: [
|
||||
{ fullTranscriptUrl: { not: null } },
|
||||
{ messages: { none: {} } }, // No messages
|
||||
],
|
||||
},
|
||||
include: {
|
||||
company: true,
|
||||
},
|
||||
take: 20, // Process 20 at a time to avoid overwhelming
|
||||
});
|
||||
|
||||
if (sessionsWithoutMessages.length === 0) {
|
||||
console.log(
|
||||
"✅ All sessions with transcript URLs already have messages!"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`📥 Found ${sessionsWithoutMessages.length} sessions to process\n`
|
||||
);
|
||||
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
for (const session of sessionsWithoutMessages) {
|
||||
console.log(`📄 Processing session ${session.id.substring(0, 8)}...`);
|
||||
|
||||
try {
|
||||
// Fetch transcript content
|
||||
const transcriptContent = await fetchTranscriptContent(
|
||||
session.fullTranscriptUrl,
|
||||
session.company.csvUsername,
|
||||
session.company.csvPassword
|
||||
);
|
||||
|
||||
if (!transcriptContent) {
|
||||
console.log(` ⚠️ No transcript content available`);
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse transcript into messages
|
||||
const messages = parseTranscriptToMessages(
|
||||
transcriptContent,
|
||||
session.id
|
||||
);
|
||||
|
||||
if (messages.length === 0) {
|
||||
console.log(` ⚠️ No messages found in transcript`);
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Save messages to database
|
||||
await prisma.message.createMany({
|
||||
data: messages,
|
||||
});
|
||||
|
||||
console.log(` ✅ Added ${messages.length} messages`);
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.log(` ❌ Error: ${error.message}`);
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n📊 Results:`);
|
||||
console.log(` ✅ Successfully processed: ${successCount} sessions`);
|
||||
console.log(` ❌ Failed to process: ${errorCount} sessions`);
|
||||
console.log(
|
||||
`\n💡 Now you can run the processing scheduler to analyze these sessions!`
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("❌ Error:", error);
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
fetchAndParseTranscripts();
|
||||
@@ -16,6 +16,7 @@ async function main() {
|
||||
select: {
|
||||
id: true,
|
||||
fullTranscriptUrl: true,
|
||||
companyId: true,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -28,47 +29,94 @@ async function main() {
|
||||
let successCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
// Group sessions by company to fetch credentials once per company
|
||||
const sessionsByCompany = new Map<string, typeof sessionsToUpdate>();
|
||||
for (const session of sessionsToUpdate) {
|
||||
if (!session.fullTranscriptUrl) {
|
||||
// Should not happen due to query, but good for type safety
|
||||
console.warn(`Session ${session.id} has no fullTranscriptUrl, skipping.`);
|
||||
if (!sessionsByCompany.has(session.companyId)) {
|
||||
sessionsByCompany.set(session.companyId, []);
|
||||
}
|
||||
sessionsByCompany.get(session.companyId)!.push(session);
|
||||
}
|
||||
|
||||
for (const [companyId, companySessions] of Array.from(
|
||||
sessionsByCompany.entries()
|
||||
)) {
|
||||
// Fetch company credentials once per company
|
||||
const company = await prisma.company.findUnique({
|
||||
where: { id: companyId },
|
||||
select: {
|
||||
csvUsername: true,
|
||||
csvPassword: true,
|
||||
name: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!company) {
|
||||
console.warn(`Company ${companyId} not found, skipping sessions.`);
|
||||
errorCount += companySessions.length;
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Fetching transcript for session ${session.id} from ${session.fullTranscriptUrl}...`
|
||||
`Processing ${companySessions.length} sessions for company: ${company.name}`
|
||||
);
|
||||
try {
|
||||
const response = await fetch(session.fullTranscriptUrl);
|
||||
if (!response.ok) {
|
||||
console.error(
|
||||
`Failed to fetch transcript for session ${session.id}: ${response.status} ${response.statusText}`
|
||||
|
||||
for (const session of companySessions) {
|
||||
if (!session.fullTranscriptUrl) {
|
||||
// Should not happen due to query, but good for type safety
|
||||
console.warn(
|
||||
`Session ${session.id} has no fullTranscriptUrl, skipping.`
|
||||
);
|
||||
const errorBody = await response.text();
|
||||
console.error(`Error details: ${errorBody.substring(0, 500)}`); // Log first 500 chars of error
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const transcriptText = await response.text();
|
||||
|
||||
if (transcriptText.trim() === "") {
|
||||
console.warn(
|
||||
`Fetched empty transcript for session ${session.id}. Storing as empty string.`
|
||||
);
|
||||
}
|
||||
|
||||
await prisma.session.update({
|
||||
where: { id: session.id },
|
||||
data: { transcriptContent: transcriptText },
|
||||
});
|
||||
console.log(
|
||||
`Successfully fetched and stored transcript for session ${session.id}.`
|
||||
`Fetching transcript for session ${session.id} from ${session.fullTranscriptUrl}...`
|
||||
);
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error(`Error processing session ${session.id}:`, error);
|
||||
errorCount++;
|
||||
try {
|
||||
// Prepare authentication if credentials are available
|
||||
const authHeader =
|
||||
company.csvUsername && company.csvPassword
|
||||
? "Basic " +
|
||||
Buffer.from(
|
||||
`${company.csvUsername}:${company.csvPassword}`
|
||||
).toString("base64")
|
||||
: undefined;
|
||||
|
||||
const response = await fetch(session.fullTranscriptUrl, {
|
||||
headers: authHeader ? { Authorization: authHeader } : {},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.error(
|
||||
`Failed to fetch transcript for session ${session.id}: ${response.status} ${response.statusText}`
|
||||
);
|
||||
const errorBody = await response.text();
|
||||
console.error(`Error details: ${errorBody.substring(0, 500)}`); // Log first 500 chars of error
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const transcriptText = await response.text();
|
||||
|
||||
if (transcriptText.trim() === "") {
|
||||
console.warn(
|
||||
`Fetched empty transcript for session ${session.id}. Storing as empty string.`
|
||||
);
|
||||
}
|
||||
|
||||
await prisma.session.update({
|
||||
where: { id: session.id },
|
||||
data: { transcriptContent: transcriptText },
|
||||
});
|
||||
console.log(
|
||||
`Successfully fetched and stored transcript for session ${session.id}.`
|
||||
);
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error(`Error processing session ${session.id}:`, error);
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
// Simple script to test the manual processing trigger
|
||||
// Usage: node scripts/manual-trigger-test.js
|
||||
|
||||
import fetch from "node-fetch";
|
||||
|
||||
async function testManualTrigger() {
|
||||
try {
|
||||
console.log("Testing manual processing trigger...");
|
||||
|
||||
const response = await fetch(
|
||||
"http://localhost:3000/api/admin/trigger-processing",
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
// Note: In a real scenario, you'd need to include authentication cookies
|
||||
// For testing, you might need to login first and copy the session cookie
|
||||
},
|
||||
body: JSON.stringify({
|
||||
batchSize: 5, // Process max 5 sessions
|
||||
maxConcurrency: 3, // Use 3 concurrent workers
|
||||
}),
|
||||
}
|
||||
);
|
||||
|
||||
const result = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
console.log("✅ Manual trigger successful:");
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
} else {
|
||||
console.log("❌ Manual trigger failed:");
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("❌ Error testing manual trigger:", error.message);
|
||||
}
|
||||
}
|
||||
|
||||
testManualTrigger();
|
||||
@@ -1,233 +0,0 @@
|
||||
// Manual trigger scripts for both schedulers
|
||||
import { fetchAndStoreSessionsForAllCompanies } from "../lib/csvFetcher.js";
|
||||
import { processAllUnparsedTranscripts } from "../lib/transcriptParser.js";
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import fetch from "node-fetch";
|
||||
import { readFileSync } from "fs";
|
||||
import { fileURLToPath } from "url";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
// Load environment variables from .env.local
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const envPath = join(__dirname, "..", ".env.local");
|
||||
|
||||
try {
|
||||
const envFile = readFileSync(envPath, "utf8");
|
||||
const envVars = envFile
|
||||
.split("\n")
|
||||
.filter((line) => line.trim() && !line.startsWith("#"));
|
||||
|
||||
envVars.forEach((line) => {
|
||||
const [key, ...valueParts] = line.split("=");
|
||||
if (key && valueParts.length > 0) {
|
||||
const value = valueParts.join("=").trim();
|
||||
if (!process.env[key.trim()]) {
|
||||
process.env[key.trim()] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log("✅ Environment variables loaded from .env.local");
|
||||
} catch (error) {
|
||||
console.warn("⚠️ Could not load .env.local file:", error.message);
|
||||
}
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
|
||||
* Manually trigger the session refresh scheduler
|
||||
*/
|
||||
async function triggerSessionRefresh() {
|
||||
console.log("=== Manual Session Refresh Trigger ===");
|
||||
try {
|
||||
await fetchAndStoreSessionsForAllCompanies();
|
||||
console.log("✅ Session refresh completed successfully");
|
||||
} catch (error) {
|
||||
console.error("❌ Session refresh failed:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually trigger the processing scheduler
|
||||
*/
|
||||
async function triggerProcessingScheduler() {
|
||||
console.log("=== Manual Processing Scheduler Trigger ===");
|
||||
|
||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||
if (!OPENAI_API_KEY) {
|
||||
console.error("❌ OPENAI_API_KEY environment variable is not set");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Find sessions that need processing
|
||||
const sessionsToProcess = await prisma.session.findMany({
|
||||
where: {
|
||||
AND: [
|
||||
{ messages: { some: {} } },
|
||||
{
|
||||
OR: [{ processed: false }, { processed: null }],
|
||||
},
|
||||
],
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
processed: true,
|
||||
},
|
||||
take: 5, // Process 5 sessions for manual testing
|
||||
});
|
||||
|
||||
console.log(`Found ${sessionsToProcess.length} sessions to process:`);
|
||||
sessionsToProcess.forEach((session) => {
|
||||
console.log(`- Session ${session.id}: processed=${session.processed}`);
|
||||
});
|
||||
|
||||
if (sessionsToProcess.length === 0) {
|
||||
console.log("✅ No sessions found requiring processing");
|
||||
return;
|
||||
}
|
||||
|
||||
// Import and run the processing function
|
||||
const { processUnprocessedSessions } = await import(
|
||||
"../lib/processingScheduler.js"
|
||||
);
|
||||
await processUnprocessedSessions();
|
||||
|
||||
console.log("✅ Processing scheduler completed");
|
||||
} catch (error) {
|
||||
console.error("❌ Processing scheduler failed:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually trigger transcript parsing
|
||||
*/
|
||||
async function triggerTranscriptParsing() {
|
||||
console.log("=== Manual Transcript Parsing Trigger ===");
|
||||
try {
|
||||
const result = await processAllUnparsedTranscripts();
|
||||
console.log(
|
||||
`✅ Transcript parsing completed: ${result.processed} processed, ${result.errors} errors`
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("❌ Transcript parsing failed:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Show current processing status
|
||||
*/
|
||||
async function showProcessingStatus() {
|
||||
console.log("=== Processing Status ===");
|
||||
|
||||
try {
|
||||
const totalSessions = await prisma.session.count();
|
||||
const processedSessions = await prisma.session.count({
|
||||
where: { processed: true },
|
||||
});
|
||||
const unprocessedSessions = await prisma.session.count({
|
||||
where: {
|
||||
OR: [{ processed: false }, { processed: null }],
|
||||
},
|
||||
});
|
||||
const withMessages = await prisma.session.count({
|
||||
where: {
|
||||
messages: {
|
||||
some: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
const readyForProcessing = await prisma.session.count({
|
||||
where: {
|
||||
AND: [
|
||||
{ messages: { some: {} } },
|
||||
{
|
||||
OR: [{ processed: false }, { processed: null }],
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`📊 Total sessions: ${totalSessions}`);
|
||||
console.log(`✅ Processed sessions: ${processedSessions}`);
|
||||
console.log(`⏳ Unprocessed sessions: ${unprocessedSessions}`);
|
||||
console.log(`📄 Sessions with messages: ${withMessages}`);
|
||||
console.log(`🔄 Ready for processing: ${readyForProcessing}`);
|
||||
|
||||
// Show some examples of unprocessed sessions
|
||||
if (readyForProcessing > 0) {
|
||||
console.log("\n📋 Sample unprocessed sessions:");
|
||||
const samples = await prisma.session.findMany({
|
||||
where: {
|
||||
AND: [
|
||||
{ messages: { some: {} } },
|
||||
{
|
||||
OR: [{ processed: false }, { processed: null }],
|
||||
},
|
||||
],
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
processed: true,
|
||||
startTime: true,
|
||||
},
|
||||
take: 3,
|
||||
});
|
||||
|
||||
samples.forEach((session) => {
|
||||
console.log(
|
||||
`- ${session.id} (${session.startTime.toISOString()}) - processed: ${session.processed}`
|
||||
);
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("❌ Failed to get processing status:", error);
|
||||
}
|
||||
}
|
||||
|
||||
// Main execution based on command line argument
|
||||
const command = process.argv[2];
|
||||
|
||||
switch (command) {
|
||||
case "refresh":
|
||||
await triggerSessionRefresh();
|
||||
break;
|
||||
case "process":
|
||||
await triggerProcessingScheduler();
|
||||
break;
|
||||
case "parse":
|
||||
await triggerTranscriptParsing();
|
||||
break;
|
||||
case "status":
|
||||
await showProcessingStatus();
|
||||
break;
|
||||
case "both":
|
||||
await triggerSessionRefresh();
|
||||
console.log("\n" + "=".repeat(50) + "\n");
|
||||
await triggerProcessingScheduler();
|
||||
break;
|
||||
case "all":
|
||||
await triggerSessionRefresh();
|
||||
console.log("\n" + "=".repeat(50) + "\n");
|
||||
await triggerTranscriptParsing();
|
||||
console.log("\n" + "=".repeat(50) + "\n");
|
||||
await triggerProcessingScheduler();
|
||||
break;
|
||||
default:
|
||||
console.log("Usage: node scripts/manual-triggers.js [command]");
|
||||
console.log("Commands:");
|
||||
console.log(
|
||||
" refresh - Trigger session refresh (fetch new sessions from CSV)"
|
||||
);
|
||||
console.log(" parse - Parse transcripts into structured messages");
|
||||
console.log(
|
||||
" process - Trigger processing scheduler (process unprocessed sessions)"
|
||||
);
|
||||
console.log(" status - Show current processing status");
|
||||
console.log(" both - Run both refresh and processing");
|
||||
console.log(" all - Run refresh, parse, and processing in sequence");
|
||||
break;
|
||||
}
|
||||
|
||||
await prisma.$disconnect();
|
||||
@@ -1,298 +0,0 @@
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
|
||||
|
||||
// Define the expected response structure from OpenAI
|
||||
interface OpenAIProcessedData {
|
||||
language: string;
|
||||
sentiment: "positive" | "neutral" | "negative";
|
||||
escalated: boolean;
|
||||
forwarded_hr: boolean;
|
||||
category: string;
|
||||
questions: string | string[];
|
||||
summary: string;
|
||||
tokens: number;
|
||||
tokens_eur: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a session transcript using OpenAI API
|
||||
* @param sessionId The session ID
|
||||
* @param transcript The transcript content to process
|
||||
* @returns Processed data from OpenAI
|
||||
*/
|
||||
async function processTranscriptWithOpenAI(
|
||||
sessionId: string,
|
||||
transcript: string
|
||||
): Promise<OpenAIProcessedData> {
|
||||
if (!OPENAI_API_KEY) {
|
||||
throw new Error("OPENAI_API_KEY environment variable is not set");
|
||||
}
|
||||
|
||||
// Create a system message with instructions
|
||||
const systemMessage = `
|
||||
You are an AI assistant tasked with analyzing chat transcripts.
|
||||
Extract the following information from the transcript:
|
||||
1. The primary language used by the user (ISO 639-1 code)
|
||||
2. Overall sentiment (positive, neutral, or negative)
|
||||
3. Whether the conversation was escalated
|
||||
4. Whether HR contact was mentioned or provided
|
||||
5. The best-fitting category for the conversation from this list:
|
||||
- Schedule & Hours
|
||||
- Leave & Vacation
|
||||
- Sick Leave & Recovery
|
||||
- Salary & Compensation
|
||||
- Contract & Hours
|
||||
- Onboarding
|
||||
- Offboarding
|
||||
- Workwear & Staff Pass
|
||||
- Team & Contacts
|
||||
- Personal Questions
|
||||
- Access & Login
|
||||
- Social questions
|
||||
- Unrecognized / Other
|
||||
6. A single question or an array of simplified questions asked by the user formulated in English
|
||||
7. A brief summary of the conversation (10-300 characters)
|
||||
8. The number of tokens used for the API call
|
||||
9. The cost of the API call in EUR
|
||||
|
||||
Return the data in JSON format matching this schema:
|
||||
{
|
||||
"language": "ISO 639-1 code",
|
||||
"sentiment": "positive|neutral|negative",
|
||||
"escalated": boolean,
|
||||
"forwarded_hr": boolean,
|
||||
"category": "one of the categories listed above",
|
||||
"questions": null, or array of questions,
|
||||
"summary": "brief summary",
|
||||
"tokens": number,
|
||||
"tokens_eur": number
|
||||
}
|
||||
`;
|
||||
|
||||
try {
|
||||
const response = await fetch(OPENAI_API_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${OPENAI_API_KEY}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: "gpt-4-turbo",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: systemMessage,
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: transcript,
|
||||
},
|
||||
],
|
||||
temperature: 0.3, // Lower temperature for more consistent results
|
||||
response_format: { type: "json_object" },
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
|
||||
}
|
||||
|
||||
const data = (await response.json()) as any;
|
||||
const processedData = JSON.parse(data.choices[0].message.content);
|
||||
|
||||
// Validate the response against our expected schema
|
||||
validateOpenAIResponse(processedData);
|
||||
|
||||
return processedData;
|
||||
} catch (error) {
|
||||
console.error(`Error processing transcript with OpenAI:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the OpenAI response against our expected schema
|
||||
* @param data The data to validate
|
||||
*/
|
||||
function validateOpenAIResponse(
|
||||
data: any
|
||||
): asserts data is OpenAIProcessedData {
|
||||
// Check required fields
|
||||
const requiredFields = [
|
||||
"language",
|
||||
"sentiment",
|
||||
"escalated",
|
||||
"forwarded_hr",
|
||||
"category",
|
||||
"questions",
|
||||
"summary",
|
||||
"tokens",
|
||||
"tokens_eur",
|
||||
];
|
||||
|
||||
for (const field of requiredFields) {
|
||||
if (!(field in data)) {
|
||||
throw new Error(`Missing required field: ${field}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate field types
|
||||
if (typeof data.language !== "string" || !/^[a-z]{2}$/.test(data.language)) {
|
||||
throw new Error(
|
||||
"Invalid language format. Expected ISO 639-1 code (e.g., 'en')"
|
||||
);
|
||||
}
|
||||
|
||||
if (!["positive", "neutral", "negative"].includes(data.sentiment)) {
|
||||
throw new Error(
|
||||
"Invalid sentiment. Expected 'positive', 'neutral', or 'negative'"
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof data.escalated !== "boolean") {
|
||||
throw new Error("Invalid escalated. Expected boolean");
|
||||
}
|
||||
|
||||
if (typeof data.forwarded_hr !== "boolean") {
|
||||
throw new Error("Invalid forwarded_hr. Expected boolean");
|
||||
}
|
||||
|
||||
const validCategories = [
|
||||
"Schedule & Hours",
|
||||
"Leave & Vacation",
|
||||
"Sick Leave & Recovery",
|
||||
"Salary & Compensation",
|
||||
"Contract & Hours",
|
||||
"Onboarding",
|
||||
"Offboarding",
|
||||
"Workwear & Staff Pass",
|
||||
"Team & Contacts",
|
||||
"Personal Questions",
|
||||
"Access & Login",
|
||||
"Social questions",
|
||||
"Unrecognized / Other",
|
||||
];
|
||||
|
||||
if (!validCategories.includes(data.category)) {
|
||||
throw new Error(
|
||||
`Invalid category. Expected one of: ${validCategories.join(", ")}`
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof data.questions !== "string" && !Array.isArray(data.questions)) {
|
||||
throw new Error("Invalid questions. Expected string or array of strings");
|
||||
}
|
||||
|
||||
if (
|
||||
typeof data.summary !== "string" ||
|
||||
data.summary.length < 10 ||
|
||||
data.summary.length > 300
|
||||
) {
|
||||
throw new Error(
|
||||
"Invalid summary. Expected string between 10-300 characters"
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof data.tokens !== "number" || data.tokens < 0) {
|
||||
throw new Error("Invalid tokens. Expected non-negative number");
|
||||
}
|
||||
|
||||
if (typeof data.tokens_eur !== "number" || data.tokens_eur < 0) {
|
||||
throw new Error("Invalid tokens_eur. Expected non-negative number");
|
||||
}
|
||||
}
|
||||

/**
 * Main function to process unprocessed sessions
 */
async function processUnprocessedSessions() {
  console.log("Starting to process unprocessed sessions...");

  // Find sessions that have transcript content but haven't been processed
  const sessionsToProcess = await prisma.session.findMany({
    where: {
      AND: [
        { transcriptContent: { not: null } },
        { transcriptContent: { not: "" } },
        { processed: { not: true } }, // Either false or null
      ],
    },
    select: {
      id: true,
      transcriptContent: true,
    },
  });

  if (sessionsToProcess.length === 0) {
    console.log("No sessions found requiring processing.");
    return;
  }

  console.log(`Found ${sessionsToProcess.length} sessions to process.`);
  let successCount = 0;
  let errorCount = 0;

  for (const session of sessionsToProcess) {
    if (!session.transcriptContent) {
      // Should not happen due to the query, but good for type safety
      console.warn(
        `Session ${session.id} has no transcript content, skipping.`
      );
      continue;
    }

    console.log(`Processing transcript for session ${session.id}...`);
    try {
      const processedData = await processTranscriptWithOpenAI(
        session.id,
        session.transcriptContent
      );

      // Update the session with processed data
      await prisma.session.update({
        where: { id: session.id },
        data: {
          language: processedData.language,
          sentiment: processedData.sentiment,
          escalated: processedData.escalated,
          forwardedHr: processedData.forwarded_hr,
          category: processedData.category,
          questions: processedData.questions,
          summary: processedData.summary,
          tokens: {
            increment: processedData.tokens,
          },
          tokensEur: {
            increment: processedData.tokens_eur,
          },
          processed: true,
        },
      });

      console.log(`Successfully processed session ${session.id}.`);
      successCount++;
    } catch (error) {
      console.error(`Error processing session ${session.id}:`, error);
      errorCount++;
    }
  }

  console.log("Session processing complete.");
  console.log(`Successfully processed: ${successCount} sessions.`);
  console.log(`Failed to process: ${errorCount} sessions.`);
}

// Run the main function
processUnprocessedSessions()
  .catch((e) => {
    console.error("An error occurred during the script execution:", e);
    process.exitCode = 1;
  })
  .finally(async () => {
    await prisma.$disconnect();
  });
@@ -1,48 +0,0 @@
// Reset all sessions to processed: false for reprocessing with new instructions
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function resetProcessedStatus() {
  try {
    console.log('🔄 Resetting processed status for all sessions...');

    // Get count of currently processed sessions
    const processedCount = await prisma.session.count({
      where: { processed: true }
    });

    console.log(`📊 Found ${processedCount} processed sessions to reset`);

    if (processedCount === 0) {
      console.log('✅ No sessions need to be reset');
      return;
    }

    // Reset all sessions to processed: false
    const result = await prisma.session.updateMany({
      where: { processed: true },
      data: {
        processed: false,
        // Also reset AI-generated fields so they get fresh analysis
        sentimentCategory: null,
        category: null,
        questions: null,
        summary: null,
        validData: true // Reset to default
      }
    });

    console.log(`✅ Successfully reset ${result.count} sessions to processed: false`);
    console.log('🤖 These sessions will be reprocessed with the new OpenAI instructions');
    console.log('🎯 Quality validation will now mark invalid data appropriately');

  } catch (error) {
    console.error('❌ Error resetting processed status:', error);
  } finally {
    await prisma.$disconnect();
  }
}

// Run the script
resetProcessedStatus();
@@ -1,83 +0,0 @@
// Test script to demonstrate the automated processing system
import { PrismaClient } from '@prisma/client';
import { processUnprocessedSessions, startProcessingScheduler } from '../lib/processingScheduler.ts';

const prisma = new PrismaClient();

async function testAutomation() {
  console.log('🧪 TESTING AUTOMATED PROCESSING SYSTEM\n');

  // Step 1: Show current status
  console.log('📊 STEP 1: Current Database Status');
  console.log('='.repeat(50));
  await showStatus();

  // Step 2: Test the automated function
  console.log('\n🤖 STEP 2: Testing Automated Processing Function');
  console.log('='.repeat(50));
  console.log('This is the SAME function that runs automatically every hour...\n');

  try {
    // This is the EXACT same function that runs automatically every hour
    const result = await processUnprocessedSessions(5, 2); // Smaller batch for demo

    console.log('\n✅ AUTOMATION TEST RESULTS:');
    console.log(`   📊 Sessions processed: ${result.totalProcessed}`);
    console.log(`   ❌ Sessions failed: ${result.totalFailed}`);
    console.log(`   ⏱️ Processing time: ${result.totalTime.toFixed(2)}s`);

    if (result.totalProcessed === 0 && result.totalFailed === 0) {
      console.log('\n🎉 PERFECT! No unprocessed sessions found.');
      console.log('✅ This means the automation is working - everything is already processed!');
    }
  } catch (error) {
    console.error('❌ Error testing automation:', error);
  }

  // Step 3: Show what the scheduler does
  console.log('\n⏰ STEP 3: Automated Scheduler Information');
  console.log('='.repeat(50));
  console.log('🔄 HOURLY AUTOMATION:');
  console.log('   • Runs every hour: cron.schedule("0 * * * *")');
  console.log('   • Checks: WHERE processed = false AND messages: { some: {} }');
  console.log('   • Processes: ALL unprocessed sessions through OpenAI');
  console.log('   • Continues: Until NO unprocessed sessions remain');
  console.log('   • Quality: Validates and filters low-quality sessions');

  console.log('\n🚀 DASHBOARD INTEGRATION:');
  console.log('   • Refresh button triggers: triggerCompleteWorkflow()');
  console.log('   • Fetches transcripts: For sessions without messages');
  console.log('   • Processes everything: Until all sessions are analyzed');

  console.log('\n🎯 PRODUCTION STATUS:');
  console.log('   ✅ System is FULLY AUTOMATED');
  console.log('   ✅ No manual intervention needed');
  console.log('   ✅ Processes new data automatically');
  console.log('   ✅ Quality validation included');

  await prisma.$disconnect();
}

async function showStatus() {
  const totalSessions = await prisma.session.count();
  const processedSessions = await prisma.session.count({ where: { processed: true } });
  const unprocessedSessions = await prisma.session.count({ where: { processed: false } });
  const sessionsWithMessages = await prisma.session.count({
    where: { messages: { some: {} } }
  });

  console.log(`📈 Total sessions: ${totalSessions}`);
  console.log(`✅ Processed sessions: ${processedSessions}`);
  console.log(`⏳ Unprocessed sessions: ${unprocessedSessions}`);
  console.log(`💬 Sessions with messages: ${sessionsWithMessages}`);

  if (processedSessions === sessionsWithMessages && unprocessedSessions === 0) {
    console.log('\n🎉 AUTOMATION WORKING PERFECTLY!');
    console.log('✅ All sessions with messages have been processed');
    console.log('✅ No unprocessed sessions remaining');
  }
}

// Run the test
testAutomation();
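Note: the STEP 3 log lines above describe the hourly cron job. A minimal sketch of what such an hourly scheduler can look like with node-cron; the repo's actual lib/processingScheduler is not shown in this diff, so the import path and wrapper name here are assumptions:

import cron from "node-cron";

// Hypothetical import; the real function lives in lib/processingScheduler.
import { processUnprocessedSessions } from "../lib/processingScheduler";

export function startHourlyProcessing(): void {
  // "0 * * * *" fires at minute 0 of every hour.
  cron.schedule("0 * * * *", async () => {
    try {
      const result = await processUnprocessedSessions(10, 3);
      console.log(
        `Hourly run: ${result.totalProcessed} processed, ${result.totalFailed} failed`
      );
    } catch (err) {
      console.error("Hourly processing run failed:", err);
    }
  });
}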
@@ -1,47 +0,0 @@
// Test the improved prompt on a few sessions
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function testImprovedPrompt() {
  console.log('🧪 TESTING IMPROVED QUESTION EXTRACTION PROMPT\n');

  // Reset a few sessions to test the new prompt
  console.log('📝 Resetting 5 sessions to test improved prompt...');

  const sessionsToReprocess = await prisma.session.findMany({
    where: {
      processed: true,
      questions: '[]' // Sessions with empty questions
    },
    take: 5
  });

  if (sessionsToReprocess.length > 0) {
    // Reset these sessions to unprocessed
    await prisma.session.updateMany({
      where: {
        id: { in: sessionsToReprocess.map(s => s.id) }
      },
      data: {
        processed: false,
        questions: null,
        summary: null
      }
    });

    console.log(`✅ Reset ${sessionsToReprocess.length} sessions for reprocessing`);
    console.log('Session IDs:', sessionsToReprocess.map(s => s.id));

    console.log('\n🚀 Now run this command to test the improved prompt:');
    console.log('npx tsx scripts/trigger-processing-direct.js');
    console.log('\nThen check the results with:');
    console.log('npx tsx scripts/check-questions-issue.js');
  } else {
    console.log('❌ No sessions with empty questions found to reprocess');
  }

  await prisma.$disconnect();
}

testImprovedPrompt();
@@ -1,83 +0,0 @@
// Script to check processing status and trigger processing
// Usage: node scripts/test-processing-status.js

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

async function checkProcessingStatus() {
  try {
    console.log("🔍 Checking processing status...\n");

    // Get processing status
    const totalSessions = await prisma.session.count();
    const processedSessions = await prisma.session.count({
      where: { processed: true },
    });
    const unprocessedSessions = await prisma.session.count({
      where: { processed: false },
    });
    const sessionsWithMessages = await prisma.session.count({
      where: {
        processed: false,
        messages: { some: {} },
      },
    });

    console.log("📊 Processing Status:");
    console.log(`   Total sessions: ${totalSessions}`);
    console.log(`   ✅ Processed: ${processedSessions}`);
    console.log(`   ⏳ Unprocessed: ${unprocessedSessions}`);
    console.log(`   📝 Unprocessed with messages: ${sessionsWithMessages}`);

    // Guard against division by zero when the database is empty
    const processedPercentage =
      totalSessions > 0
        ? ((processedSessions / totalSessions) * 100).toFixed(1)
        : "0.0";
    console.log(`   📈 Processing progress: ${processedPercentage}%\n`);

    // Check recent processing activity
    const recentlyProcessed = await prisma.session.findMany({
      where: {
        processed: true,
        createdAt: {
          gte: new Date(Date.now() - 60 * 60 * 1000), // Last hour
        },
      },
      orderBy: { createdAt: "desc" },
      take: 5,
      select: {
        id: true,
        createdAt: true,
        category: true,
        sentiment: true,
      },
    });

    if (recentlyProcessed.length > 0) {
      console.log("🕒 Recently processed sessions:");
      recentlyProcessed.forEach((session) => {
        const timeAgo = Math.round(
          (Date.now() - session.createdAt.getTime()) / 1000 / 60
        );
        console.log(
          `   • ${session.id.substring(0, 8)}... (${timeAgo}m ago) - ${session.category || "No category"}`
        );
      });
    } else {
      console.log("🕒 No sessions processed in the last hour");
    }

    console.log("\n✨ Processing system is working correctly!");
    console.log("💡 The parallel processing successfully processed sessions.");
    console.log(
      "🎯 For manual triggers, you need to be logged in as an admin user."
    );
  } catch (error) {
    console.error("❌ Error checking status:", error);
  } finally {
    await prisma.$disconnect();
  }
}

checkProcessingStatus();
@@ -1,57 +0,0 @@
// Trigger CSV refresh for all companies
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function triggerCsvRefresh() {
  try {
    console.log('🔄 Triggering CSV refresh for all companies...\n');

    // Get all companies
    const companies = await prisma.company.findMany();

    if (companies.length === 0) {
      console.log('❌ No companies found. Run seed script first.');
      return;
    }

    console.log(`🏢 Found ${companies.length} companies:`);

    for (const company of companies) {
      console.log(`📊 Company: ${company.name} (ID: ${company.id})`);
      console.log(`📥 CSV URL: ${company.csvUrl}`);

      try {
        const response = await fetch('http://localhost:3000/api/admin/refresh-sessions', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            companyId: company.id
          })
        });

        const result = await response.json();

        if (response.ok) {
          console.log(`✅ Successfully imported ${result.imported} sessions for ${company.name}`);
        } else {
          console.log(`❌ Error for ${company.name}: ${result.error}`);
        }
      } catch (error) {
        console.log(`❌ Failed to refresh ${company.name}: ${error.message}`);
      }

      console.log(''); // Empty line for readability
    }

  } catch (error) {
    console.error('❌ Error triggering CSV refresh:', error);
  } finally {
    await prisma.$disconnect();
  }
}

// Run the script
triggerCsvRefresh();
@@ -1,21 +0,0 @@
// Direct processing trigger without authentication
import { processUnprocessedSessions } from '../lib/processingScheduler.ts';

async function triggerProcessing() {
  try {
    console.log('🤖 Starting complete batch processing of all unprocessed sessions...\n');

    // Process all unprocessed sessions in batches until completion
    const result = await processUnprocessedSessions(10, 3);

    console.log('\n🎉 Complete processing finished!');
    console.log(`📊 Final results: ${result.totalProcessed} processed, ${result.totalFailed} failed`);
    console.log(`⏱️ Total time: ${result.totalTime.toFixed(2)}s`);

  } catch (error) {
    console.error('❌ Error during processing:', error);
  }
}

// Run the script
triggerProcessing();
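Note: processUnprocessedSessions(10, 3) takes a batch size and a concurrency limit. Its implementation is not part of this diff; a hedged sketch of the loop the call sites imply, where fetchUnprocessedBatch and processOneSession are hypothetical helpers:

// Hypothetical helpers, not real repo functions.
declare function fetchUnprocessedBatch(limit: number): Promise<string[]>;
declare function processOneSession(id: string): Promise<void>;

async function processUnprocessedSessionsSketch(
  batchSize: number,
  concurrency: number
): Promise<{ totalProcessed: number; totalFailed: number; totalTime: number }> {
  const start = Date.now();
  let totalProcessed = 0;
  let totalFailed = 0;

  // Keep pulling batches until no unprocessed sessions remain.
  while (true) {
    const batch = await fetchUnprocessedBatch(batchSize);
    if (batch.length === 0) break;

    // Work through the batch `concurrency` sessions at a time.
    for (let i = 0; i < batch.length; i += concurrency) {
      const slice = batch.slice(i, i + concurrency);
      const results = await Promise.allSettled(slice.map(processOneSession));
      for (const r of results) {
        if (r.status === "fulfilled") totalProcessed++;
        else totalFailed++;
      }
    }
  }

  return {
    totalProcessed,
    totalFailed,
    totalTime: (Date.now() - start) / 1000,
  };
}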
39
server.js
39
server.js
@@ -1,39 +0,0 @@
// Custom Next.js server with scheduler initialization
const { createServer } = require("http");
const { parse } = require("url");
const next = require("next");
const { startScheduler } = require("./lib/scheduler");
const { startProcessingScheduler } = require("./lib/processingScheduler");

const dev = process.env.NODE_ENV !== "production";
const hostname = "localhost";
const port = process.env.PORT || 3000;

// Initialize Next.js
const app = next({ dev, hostname, port });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  // Initialize schedulers when the server starts
  console.log("Starting schedulers...");
  startScheduler();
  startProcessingScheduler();
  console.log("All schedulers initialized successfully");

  createServer(async (req, res) => {
    try {
      // Parse the URL
      const parsedUrl = parse(req.url, true);

      // Let Next.js handle the request
      await handle(req, res, parsedUrl);
    } catch (err) {
      console.error("Error occurred handling", req.url, err);
      res.statusCode = 500;
      res.end("Internal Server Error");
    }
  }).listen(port, (err) => {
    if (err) throw err;
    console.log(`> Ready on http://${hostname}:${port}`);
  });
});
46
server.ts
46
server.ts
@@ -1,46 +0,0 @@
// Custom Next.js server with scheduler initialization
import { createServer } from "http";
import { parse } from "url";
import next from "next";
import { processUnprocessedSessions } from "./lib/processingSchedulerNoCron.js";

const dev = process.env.NODE_ENV !== "production";
const hostname = "localhost";
const port = parseInt(process.env.PORT || "3000", 10);

// Initialize Next.js
const app = next({ dev, hostname, port });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  // Start processing scheduler in the background
  const BATCH_SIZE = 10;
  const MAX_CONCURRENCY = 5;
  const SCHEDULER_INTERVAL = 5 * 60 * 1000; // 5 minutes

  // Initial processing run
  processUnprocessedSessions(BATCH_SIZE, MAX_CONCURRENCY).catch(console.error);

  // Schedule regular processing
  setInterval(() => {
    processUnprocessedSessions(BATCH_SIZE, MAX_CONCURRENCY).catch(console.error);
  }, SCHEDULER_INTERVAL);

  console.log("Processing scheduler started with 5 minute interval");

  createServer(async (req, res) => {
    try {
      // Parse the URL
      const parsedUrl = parse(req.url || "", true);

      // Let Next.js handle the request
      await handle(req, res, parsedUrl);
    } catch (err) {
      console.error("Error occurred handling", req.url, err);
      res.statusCode = 500;
      res.end("Internal Server Error");
    }
  }).listen(port, () => {
    console.log(`> Ready on http://${hostname}:${port}`);
  });
});
231
src/index.ts
Normal file
231
src/index.ts
Normal file
@@ -0,0 +1,231 @@
// Cloudflare Worker entry point for LiveDash-Node
// This file handles requests when deployed to Cloudflare Workers

import { PrismaClient } from '@prisma/client';
import { PrismaD1 } from '@prisma/adapter-d1';
import { formatError } from './utils/error';

export interface Env {
  DB: D1Database;
  NEXTAUTH_SECRET?: string;
  NEXTAUTH_URL?: string;
  WORKER_ENV?: string; // 'development' | 'production'
}

export default {
  async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
    try {
      // Initialize Prisma with D1 adapter
      const adapter = new PrismaD1(env.DB);
      const prisma = new PrismaClient({ adapter });

      const url = new URL(request.url);

      // CORS headers for all responses
      const corsHeaders = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type, Authorization',
      };

      // Handle preflight requests
      if (request.method === 'OPTIONS') {
        return new Response(null, { headers: corsHeaders });
      }

      // Handle API routes
      if (url.pathname.startsWith('/api/')) {
        // Simple health check endpoint
        if (url.pathname === '/api/health') {
          const companyCount = await prisma.company.count();
          const sessionCount = await prisma.session.count();

          return new Response(
            JSON.stringify({
              status: 'healthy',
              database: 'connected',
              companies: companyCount,
              sessions: sessionCount,
              timestamp: new Date().toISOString()
            }),
            {
              headers: {
                'Content-Type': 'application/json',
                ...corsHeaders
              },
            }
          );
        }

        // Test metrics endpoint
        if (url.pathname === '/api/test-metrics') {
          const sessions = await prisma.session.findMany({
            take: 10,
            orderBy: { startTime: 'desc' }
          });

          return new Response(
            JSON.stringify({
              message: 'LiveDash API running on Cloudflare Workers with D1',
              recentSessions: sessions.length,
              sessions: sessions
            }),
            {
              headers: {
                'Content-Type': 'application/json',
                ...corsHeaders
              },
            }
          );
        }

        // For other API routes, return a placeholder response
        return new Response(
          JSON.stringify({
            message: 'API endpoint not implemented in worker yet',
            path: url.pathname,
            method: request.method,
            note: 'This endpoint needs to be migrated from Next.js API routes'
          }),
          {
            status: 501,
            headers: {
              'Content-Type': 'application/json',
              ...corsHeaders
            },
          }
        );
      }

      // Handle root path - simple test page
      if (url.pathname === '/') {
        try {
          const companies = await prisma.company.findMany();
          const recentSessions = await prisma.session.findMany({
            take: 5,
            orderBy: { startTime: 'desc' },
            include: { company: { select: { name: true } } }
          });

          return new Response(
            `
            <!DOCTYPE html>
            <html>
              <head>
                <title>LiveDash-Node on Cloudflare Workers</title>
                <link rel="stylesheet" type="text/css" href="https://static.integrations.cloudflare.com/styles.css">
                <style>
                  .container { max-width: 1000px; margin: 0 auto; padding: 20px; }
                  .grid { display: grid; grid-template-columns: 1fr 1fr; gap: 20px; margin: 20px 0; }
                  .card { background: #f8f9fa; padding: 20px; border-radius: 8px; border: 1px solid #e9ecef; }
                  pre { background: #f5f5f5; padding: 15px; border-radius: 5px; overflow-x: auto; font-size: 12px; }
                  .api-list { list-style: none; padding: 0; }
                  .api-list li { margin: 8px 0; }
                  .api-list a { color: #0066cc; text-decoration: none; }
                  .api-list a:hover { text-decoration: underline; }
                  .status { color: #28a745; font-weight: bold; }
                </style>
              </head>
              <body>
                <div class="container">
                  <header>
                    <img
                      src="https://imagedelivery.net/wSMYJvS3Xw-n339CbDyDIA/30e0d3f6-6076-40f8-7abb-8a7676f83c00/public"
                    />
                    <h1>🎉 LiveDash-Node Successfully Connected to D1!</h1>
                    <p class="status">✓ Database Connected | ✓ Prisma Client Working | ✓ D1 Adapter Active</p>
                  </header>

                  <div class="grid">
                    <div class="card">
                      <h3>📊 Database Stats</h3>
                      <ul>
                        <li><strong>Companies:</strong> ${companies.length}</li>
                        <li><strong>Recent Sessions:</strong> ${recentSessions.length}</li>
                      </ul>
                    </div>

                    <div class="card">
                      <h3>🔗 Test API Endpoints</h3>
                      <ul class="api-list">
                        <li><a href="/api/health">/api/health</a> - Health check</li>
                        <li><a href="/api/test-metrics">/api/test-metrics</a> - Sample data</li>
                      </ul>
                    </div>
                  </div>

                  <div class="card">
                    <h3>🏢 Companies in Database</h3>
                    <pre>${companies.length > 0 ? JSON.stringify(companies, null, 2) : 'No companies found'}</pre>
                  </div>

                  <div class="card">
                    <h3>📈 Recent Sessions</h3>
                    <pre>${recentSessions.length > 0 ? JSON.stringify(recentSessions, null, 2) : 'No sessions found'}</pre>
                  </div>

                  <footer style="margin-top: 40px; text-align: center; color: #666;">
                    <small>
                      <a target="_blank" href="https://developers.cloudflare.com/d1/">Learn more about Cloudflare D1</a> |
                      <a target="_blank" href="https://www.prisma.io/docs/guides/deployment/deployment-guides/deploying-to-cloudflare-workers">Prisma + Workers Guide</a>
                    </small>
                  </footer>
                </div>
              </body>
            </html>
            `,
            {
              headers: {
                'Content-Type': 'text/html',
                ...corsHeaders
              },
            }
          );
        } catch (dbError) {
          return new Response(
            `
            <!DOCTYPE html>
            <html>
              <head><title>LiveDash-Node - Database Error</title></head>
              <body>
                <h1>❌ Database Connection Error</h1>
                <p>Error: ${dbError instanceof Error ? dbError.message : 'Unknown database error'}</p>
                <p>Check your D1 database configuration and make sure migrations have been applied.</p>
              </body>
            </html>
            `,
            {
              status: 500,
              headers: { 'Content-Type': 'text/html' },
            }
          );
        }
      }

      // Handle all other routes
      return new Response('Not Found - This endpoint is not available in the worker deployment', {
        status: 404,
        headers: corsHeaders
      });

    } catch (error) {
      console.error('Worker error:', error); // Log full error details, including stack trace

      // Use the formatError utility to properly format the error response
      const errorPayload = formatError(error, env);

      return new Response(
        JSON.stringify(errorPayload),
        {
          status: 500,
          headers: {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Origin': '*'
          }
        }
      );
    }
  },
};

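Note: a quick way to smoke-test this worker locally is to run it under wrangler's dev server (npx wrangler dev, which listens on http://localhost:8787 by default) and hit the health route, for example:

// Minimal check against a locally running worker; the URL assumes
// wrangler dev's default port of 8787.
const res = await fetch("http://localhost:8787/api/health");
console.log(await res.json()); // → { status: "healthy", database: "connected", ... }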
16
src/utils/error.ts
Normal file
16
src/utils/error.ts
Normal file
@@ -0,0 +1,16 @@
export function formatError(error: unknown, env?: { WORKER_ENV?: string }): Record<string, unknown> {
  const payload: Record<string, unknown> = {
    error: 'Internal Server Error',
    message: error instanceof Error ? error.message : 'Unknown error'
  };

  // Only include stack trace in development environment
  // In Cloudflare Workers, check environment via env parameter
  const isDevelopment = env?.WORKER_ENV !== 'production';

  if (isDevelopment) {
    payload.stack = error instanceof Error ? error.stack : undefined;
  }

  return payload;
}
36
tests/formatError.test.ts
Normal file
36
tests/formatError.test.ts
Normal file
@@ -0,0 +1,36 @@
import { test } from 'node:test';
import assert from 'node:assert';
import { formatError } from '../src/utils/error';

const originalEnv = process.env.NODE_ENV;

test('includes stack when not in production', () => {
  const err = new Error('boom');
  const payload = formatError(err, { WORKER_ENV: 'development' });
  assert.ok('stack' in payload);
});

test('omits stack in production', () => {
  const err = new Error('boom');
  const payload = formatError(err, { WORKER_ENV: 'production' });
  assert.ok(!('stack' in payload));
});

test('includes message for all environments', () => {
  const err = new Error('boom');
  const devPayload = formatError(err, { WORKER_ENV: 'development' });
  const prodPayload = formatError(err, { WORKER_ENV: 'production' });

  assert.strictEqual(devPayload.message, 'boom');
  assert.strictEqual(prodPayload.message, 'boom');
});

test('handles non-Error objects', () => {
  const payload = formatError('string error', { WORKER_ENV: 'development' });
  assert.strictEqual(payload.message, 'Unknown error');
  assert.strictEqual(payload.error, 'Internal Server Error');
});

test.after(() => {
  if (originalEnv === undefined) delete process.env.NODE_ENV;
  else process.env.NODE_ENV = originalEnv;
});
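Note: these tests run TypeScript directly under the built-in node:test runner, so a TS loader is needed. One option, assuming tsx is installed and Node is 20.6 or newer:

node --import tsx --test tests/formatError.test.ts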
@@ -8,9 +8,11 @@
    "jsx": "preserve",
    "lib": ["dom", "dom.iterable", "esnext"],
    "module": "esnext",
    "moduleResolution": "node",
    "moduleResolution": "node", // bundler
    "noEmit": true,
    "noImplicitAny": false, // Allow implicit any types
    "preserveSymlinks": false,
    "types": ["./worker-configuration.d.ts"],
    "paths": {
      "@/*": ["./*"]
    },
@@ -23,10 +25,11 @@
    "skipLibCheck": true,
    "strict": true,
    "strictNullChecks": true,
    "target": "es5"
    "target": "ESNext"
  },
  "exclude": ["node_modules"],
  "include": [
    "src",
    "next-env.d.ts",
    "**/*.ts",
    "**/*.tsx",

6870
worker-configuration.d.ts
vendored
Normal file
6870
worker-configuration.d.ts
vendored
Normal file
File diff suppressed because it is too large
55
wrangler.json
Normal file
55
wrangler.json
Normal file
@@ -0,0 +1,55 @@
/**
 * For more details on how to configure Wrangler, refer to:
 * https://developers.cloudflare.com/workers/wrangler/configuration/
 */
{
  "$schema": "node_modules/wrangler/config-schema.json",
  "compatibility_date": "2025-04-01",
  "main": "src/index.ts",
  "name": "livedash",
  "upload_source_maps": true,
  "d1_databases": [
    {
      "binding": "DB",
      "database_id": "d4ee7efe-d37a-48e4-bed7-fdfaa5108131",
      "database_name": "d1-notso-livedash"
    }
  ],
  "observability": {
    "enabled": true
  }
  /**
   * Smart Placement
   * Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement
   */
  // "placement": { "mode": "smart" },

  /**
   * Bindings
   * Bindings allow your Worker to interact with resources on the Cloudflare Developer Platform, including
   * databases, object storage, AI inference, real-time communication and more.
   * https://developers.cloudflare.com/workers/runtime-apis/bindings/
   */

  /**
   * Environment Variables
   * https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables
   */
  // "vars": { "MY_VARIABLE": "production_value" },
  /**
   * Note: Use secrets to store sensitive data.
   * https://developers.cloudflare.com/workers/configuration/secrets/
   */

  /**
   * Static Assets
   * https://developers.cloudflare.com/workers/static-assets/binding/
   */
  // "assets": { "directory": "./public/", "binding": "ASSETS" },

  /**
   * Service Bindings (communicate between multiple Workers)
   * https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings
   */
  // "services": [{ "binding": "MY_SERVICE", "service": "my-service" }]
}
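Note: with this configuration the worker's DB binding points at the d1-notso-livedash database. The schema has to be applied to D1 separately; wrangler's migration command covers both the local simulator used by wrangler dev and the remote database (a hedged example, assuming migrations live in wrangler's default migrations/ directory):

npx wrangler d1 migrations apply d1-notso-livedash --local
npx wrangler d1 migrations apply d1-notso-livedash --remote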