diff --git a/dist/components/App.js b/dist/components/App.js deleted file mode 100644 index b0a0bf6..0000000 --- a/dist/components/App.js +++ /dev/null @@ -1,75 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const react_1 = __importStar(require("react")); -const recorder_service_1 = require("../services/recorder.service"); -const player_service_1 = require("../services/player.service"); -const recorder = new recorder_service_1.RecorderService(); -const player = new player_service_1.PlayerService(); -const App = () => { - const [recording, setRecording] = (0, react_1.useState)(false); - const [basicCode, setBasicCode] = (0, react_1.useState)(''); - const handleStartRecording = async () => { - setRecording(true); - await recorder.startRecording(); - }; - const handleStopRecording = async () => { - //@ts-ignore - if (window.microphone) { - //@ts-ignore - window.stopMicrophone(); - console.log('Microphone stopped'); - } - setRecording(false); - const code = await recorder.stopRecording(); - setBasicCode(code); - // Save to file - const blob = new Blob([code], { type: 'text/plain' }); - const url = URL.createObjectURL(blob); - const a = document.createElement('a'); - a.href = url; - a.download = 'automation.bas'; - a.click(); - }; - const handlePlayback = async () => { - try { - await player.executeBasicCode(basicCode); - } - catch (error) { - console.error('Playback error:', error); - } - }; - return (react_1.default.createElement("div", { className: "p-4 h-auto" }, - react_1.default.createElement("h1", { className: "text-2xl font-bold mb-4" }, "General Bots Desktop"), - react_1.default.createElement("div", { className: "space-x-4 mb-4 h-auto" }, - react_1.default.createElement("button", { id: "startBtn", className: `px-4 py-2 rounded ${recording ? 'bg-red-500' : 'bg-blue-500'} text-white`, onClick: recording ? handleStopRecording : handleStartRecording }, recording ? 
'Stop Recording' : 'Start Recording'), - react_1.default.createElement("button", { id: "stopBtn", className: "px-4 py-2 rounded bg-green-500 text-white", onClick: handlePlayback, disabled: !basicCode }, "Play Recording")), - react_1.default.createElement("div", { className: "mt-4 h-20" }, - react_1.default.createElement("h2", { className: "text-xl font-bold mb-2" }, "Generated BASIC Code:"), - react_1.default.createElement("pre", { className: "h-20 min-h-100 bg-gray-100 p-2 rounded border" }, basicCode)), - react_1.default.createElement("div", { className: "mb-4" }, - react_1.default.createElement("a", { href: "https://github.com/General Bots" }, "General Bots")))); -}; -exports.default = App; diff --git a/dist/main/main.js b/dist/main/main.js deleted file mode 100644 index 3e19b9f..0000000 --- a/dist/main/main.js +++ /dev/null @@ -1,313 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.cleanupAudioCapture = cleanupAudioCapture; -require('dotenv').config(); -require('electron-require'); -const electron_1 = require("electron"); -const electron_2 = require("electron"); -const path = __importStar(require("path")); -const electron_3 = require("electron"); -const recorder_service_1 = require("../services/recorder.service"); -const player_service_1 = require("../services/player.service"); -const audioCapture = { - mediaRecorder: null, - audioStream: null, - analyserNode: null, - audioData: null, - isCapturing: false -}; -const recorder = new recorder_service_1.RecorderService(); -const player = new player_service_1.PlayerService(); -function createWindow() { - const mainWindow = new electron_2.BrowserWindow({ - width: 700, - height: 500, - backgroundColor: "grey", - center: true, - maximizable: false, - thickFrame: true, - autoHideMenuBar: true, - webPreferences: { - experimentalFeatures: true, - nodeIntegrationInWorker: true, - nodeIntegration: true, - nodeIntegrationInSubFrames: true, - contextIsolation: false, - preload: path.join(__dirname, '../preload/preload.js') - } - }); - electron_2.ipcMain.handle('request-microphone', async () => { - try { - const stream = await mainWindow.webContents.executeJavaScript(` - (async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - return stream; - } catch (error) { - console.error('Error accessing microphone:', error); - throw error; - } - })(); - `); - return stream; // Return the stream to the UserService - } - catch (error) { - console.error('Failed to get microphone stream:', 
error); - throw error; - } - }); - mainWindow.setAutoHideMenuBar(true); - mainWindow.setMaximizable(false); - if (process.env.NODE_ENV === 'development') { - mainWindow.loadURL('http://localhost:8080'); - mainWindow.webContents.openDevTools(); - } - else { - mainWindow.loadFile(path.join(__dirname, '../../src/renderer/index.html')); - } - electron_2.ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder)); - electron_2.ipcMain.handle('request-microphone', async () => { - try { - const stream = await mainWindow.webContents.executeJavaScript(` - (async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - return stream; - } catch (error) { - console.error('Error accessing microphone:', error); - throw error; - } - })(); - `); - return stream; // Return the stream to the UserService - } - catch (error) { - console.error('Failed to get microphone stream:', error); - throw error; - } - }); - electron_2.ipcMain.handle('keyboard-event', recorder.handleKeyboardEvent.bind(recorder)); - // Handler to capture the entire screen - electron_2.ipcMain.handle('get-screenshot', async () => { - console.log('get-screenshot called'); - const sources = await electron_2.desktopCapturer.getSources({ types: ['screen'] }); - const screenSource = sources[0]; // Get the first screen source - const { thumbnail } = screenSource; // Thumbnail is a native image - return thumbnail.toPNG(); // Return the screenshot as PNG buffer - }); - electron_2.ipcMain.handle('start-recording', async () => { - console.log('start-recording called'); - await recorder.startRecording(); - }); - electron_2.ipcMain.handle('stop-recording', async () => { - console.log('stop-recording called'); - return await recorder.stopRecording(); - }); - electron_2.ipcMain.handle('execute-basic-code', async (_, code) => { - console.log('execute-basic-code called with:', code); - await player.executeBasicCode(code); - }); - electron_2.ipcMain.handle('check-microphone-permission', async () => { - console.log('check-microphone-permission called'); - if (process.platform === 'darwin') { - const status = await electron_3.systemPreferences.getMediaAccessStatus('microphone'); - if (status !== 'granted') { - const success = await electron_3.systemPreferences.askForMediaAccess('microphone'); - return success; - } - return true; - } - return true; // On Windows/Linux, permissions are handled by the OS - }); - electron_2.ipcMain.handle('start-microphone-capture', async (event) => { - const window = electron_2.BrowserWindow.fromWebContents(event.sender); - if (!window) { - throw new Error('No window found for this request'); - } - return startMicrophoneCapture(window); - }); - electron_2.ipcMain.handle('stop-microphone-capture', async (event) => { - const window = electron_2.BrowserWindow.fromWebContents(event.sender); - if (!window) { - throw new Error('No window found for this request'); - } - return stopMicrophoneCapture(window); - }); - electron_2.ipcMain.handle('start-microphone-capture', async (event, ...args) => { - // Perform asynchronous microphone capture logic here - try { - const result = await startMicrophoneCapture(args[0]); // Assuming this function is async - return result; - } - catch (error) { - console.error("Error during microphone capture:", error); - throw error; // Send the error back to the renderer - } - }); - electron_2.ipcMain.handle('stop-microphone-capture', async (event, ...args) => { - try { - const result = await stopMicrophoneCapture(args[0]); - return result; - } - catch (error) 
{ - console.error("Error stopping microphone capture:", error); - throw error; // Send the error back to the renderer - } - }); -} -electron_1.app.whenReady().then(createWindow); -electron_1.app.on('window-all-closed', () => { - if (process.platform !== 'darwin') { - electron_1.app.quit(); - } -}); -electron_1.app.on('activate', () => { - if (electron_2.BrowserWindow.getAllWindows().length === 0) { - createWindow(); - } -}); -// Enable required permissions -electron_1.app.commandLine.appendSwitch('enable-speech-dispatcher'); -// Register cleanup on app quit -electron_1.app.on('will-quit', cleanupAudioCapture); -// Function to get the focused window or first available window -function getFocusedWindow() { - const focusedWindow = electron_2.BrowserWindow.getFocusedWindow(); - if (focusedWindow) - return focusedWindow; - const windows = electron_2.BrowserWindow.getAllWindows(); - return windows.length > 0 ? windows[0] : null; -} -// Function to safely send to window -function sendToWindow(channel, ...args) { - const window = getFocusedWindow(); - if (window && !window.isDestroyed()) { - window.webContents.send(channel, ...args); - } -} -async function startMicrophoneCapture(window) { - console.log('Starting microphone capture...'); - try { - navigator.mediaDevices; - // Request microphone access - //@ts-ignore - const stream = await window.myApi.startMicrophone(); - audioCapture.audioStream = stream; - // Set up audio analysis - const audioContext = new (window.AudioContext || window.webkitAudioContext)(); - const sourceNode = audioContext.createMediaStreamSource(stream); - audioCapture.analyserNode = audioContext.createAnalyser(); - audioCapture.analyserNode.fftSize = 2048; - sourceNode.connect(audioCapture.analyserNode); - audioCapture.audioData = new Uint8Array(audioCapture.analyserNode.frequencyBinCount); - // Set up MediaRecorder - audioCapture.mediaRecorder = new MediaRecorder(stream, { - mimeType: 'audio/webm;codecs=opus' - }); - // Handle audio data - audioCapture.mediaRecorder.ondataavailable = (event) => { - if (event.data.size > 0 && !window.isDestroyed()) { - // Convert blob to buffer and send to renderer - const reader = new FileReader(); - reader.onloadend = () => { - const buffer = Buffer.from(reader.result); - window.webContents.send('audio-chunk', buffer); - }; - reader.readAsArrayBuffer(event.data); - } - }; - // Start recording - audioCapture.mediaRecorder.start(1000); // Capture in 1-second chunks - audioCapture.isCapturing = true; - // Start audio level monitoring - monitorAudioLevels(window); - console.log('Microphone capture started successfully'); - } - catch (error) { - console.error('Failed to start microphone capture:', error); - throw error; - } -} -function monitorAudioLevels(window) { - if (!audioCapture.isCapturing || !audioCapture.analyserNode || !audioCapture.audioData || window.isDestroyed()) { - return; - } - // Get audio level data - audioCapture.analyserNode.getByteFrequencyData(audioCapture.audioData); - // Calculate average volume level (0-1) - const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) / - audioCapture.audioData.length / - 255; - // Send level to renderer - if (!window.isDestroyed()) { - window.webContents.send('audio-level', average); - } - // Continue monitoring - requestAnimationFrame(() => monitorAudioLevels(window)); -} -function stopMicrophoneCapture(window) { - console.log('Stopping microphone capture...'); - try { - if (audioCapture.mediaRecorder && audioCapture.mediaRecorder.state !== 'inactive') { - 
audioCapture.mediaRecorder.stop(); - } - if (audioCapture.audioStream) { - audioCapture.audioStream.getTracks().forEach(track => track.stop()); - } - if (audioCapture.analyserNode) { - audioCapture.analyserNode.disconnect(); - } - audioCapture.isCapturing = false; - audioCapture.mediaRecorder = null; - audioCapture.audioStream = null; - audioCapture.analyserNode = null; - audioCapture.audioData = null; - if (!window.isDestroyed()) { - window.webContents.send('microphone-stopped'); - } - console.log('Microphone capture stopped successfully'); - } - catch (error) { - console.error('Failed to stop microphone capture:', error); - throw error; - } -} -// Error handler for audio processing -function handleAudioError(error, window) { - console.error('Audio processing error:', error); - stopMicrophoneCapture(window); - // Notify renderer of error if window still exists - if (!window.isDestroyed()) { - window.webContents.send('audio-error', error.message); - } -} -// Clean up resources when app is closing -function cleanupAudioCapture() { - const window = getFocusedWindow(); - if (window) { - stopMicrophoneCapture(window); - } -} diff --git a/dist/preload/preload.js b/dist/preload/preload.js deleted file mode 100644 index 730031f..0000000 --- a/dist/preload/preload.js +++ /dev/null @@ -1,28 +0,0 @@ -const { ipcRenderer } = require('electron'); -const { contextBridge } = require('electron'); -const audioContext = new (window.AudioContext || window.webkitAudioContext)(); -// Initialize IPC listeners for microphone access -ipcRenderer.on('request-microphone', async () => { - if (navigator.mediaDevices) { - return navigator.mediaDevices.getUserMedia({ audio: true }); - } - else { - console.error("MediaDevices API not supported"); - } - // Send the microphone stream back to the renderer - //event.sender.send('microphone-stream', stream); -}); -//@ts-nocheck -window.myApi = { - startMicrophone: () => { - alert(1); - }, - sendMessage: (message) => { - console.log('[preload] sendMessage called with:', message); - return ipcRenderer.send('message-from-renderer', message); - }, - receiveMessage: (callback) => { - console.log('[preload] receiveMessage registered with callback'); - return ipcRenderer.on('message-from-main', (event, arg) => callback(arg)); - }, -}; diff --git a/dist/renderer/index.js b/dist/renderer/index.js deleted file mode 100644 index 0a53067..0000000 --- a/dist/renderer/index.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const react_1 = __importDefault(require("react")); -const client_1 = __importDefault(require("react-dom/client")); -const App_1 = __importDefault(require("../components/App")); -console.log('[renderer] Initializing React app'); -client_1.default.createRoot(document.getElementById('root')).render(react_1.default.createElement(react_1.default.StrictMode, null, - react_1.default.createElement(App_1.default, null))); diff --git a/dist/services/openai.service.js b/dist/services/openai.service.js deleted file mode 100644 index 2b5df05..0000000 --- a/dist/services/openai.service.js +++ /dev/null @@ -1,150 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.OpenAIService = void 0; -const openai_1 = require("openai"); -const { Readable } = require('stream'); -class OpenAIService { - constructor() { - this.client = new openai_1.AzureOpenAI({ - dangerouslyAllowBrowser: true, - endpoint: process.env.AZURE_OPEN_AI_ENDPOINT || '', - apiVersion: process.env.OPENAI_API_VERSION || '2024-02-15-preview', - apiKey: process.env.AZURE_OPEN_AI_KEY || '' - }); - } - async transcribeAudio(audioBlob) { - try { - // Convert Blob to ArrayBuffer - const arrayBuffer = await audioBlob.arrayBuffer(); - // Convert Buffer to a Readable stream - const buffer = Buffer.from(arrayBuffer); - const stream = new Readable(); - stream.push(buffer); - stream.push(null); // Signal the end of the stream - const response = await this.client.audio.transcriptions.create({ - file: stream, - model: process.env.AZURE_OPEN_AI_WHISPER_MODEL || 'whisper-1', - language: 'en', - response_format: 'verbose_json' - }); - return { - text: response.text, - //@ts-ignore - segments: response.segments?.map(seg => ({ - text: seg.text, - start: seg.start, - end: seg.end - })) || [] - }; - } - catch (error) { - console.error('Error in transcribeAudio:', error); - throw new Error('Failed to transcribe audio'); - } - } - async analyzeScreenWithContext(context) { - try { - const response = await this.client.chat.completions.create({ - model: process.env.AZURE_OPEN_AI_VISION_MODEL || '', - messages: [ - { - role: 'system', - content: `You are an AI that analyzes screenshots and voice commands to determine user intentions for automation. - You should identify UI elements and return specific actions in JSON format. - Focus on the area near the field ${context.identifier}.` - }, - { - role: 'user', - content: [ - { - type: 'text', - text: `Analyze this screenshot with the following context: - Voice Command: "${context.transcription}" - Cursor Position: x=${context.cursorPosition.x}, y=${context.cursorPosition.y} - - Identify the most likely action based on the voice command and cursor position. 
- Return in format: { - "type": "click|type|move", - "identifier": "element-id or descriptive name", - "value": "text to type (for type actions)", - "confidence": 0-1, - "bounds": {"x": number, "y": number, "width": number, "height": number} - }` - }, - { - type: 'image_url', - image_url: { - url: `data:image/png;base64,${context.screenshot}` - } - } - ] - } - ], - max_tokens: 500, - temperature: 0.3 - }); - const result = JSON.parse(response.choices[0].message.content || '{}'); - return result; - } - catch (error) { - console.error('Error in analyzeScreenWithContext:', error); - throw new Error('Failed to analyze screen context'); - } - } - async analyzeScreen(screenshot) { - try { - const response = await this.client.chat.completions.create({ - model: process.env.AZURE_OPEN_AI_VISION_MODEL || '', - messages: [ - { - role: 'system', - content: 'You are an AI that analyzes screenshots to identify interactive UI elements and their properties.' - }, - { - role: 'user', - content: [ - { - type: 'text', - text: `Analyze this screenshot and identify all interactive elements (buttons, text fields, dropdowns, etc). - For each element, provide: - - Type of element - - Identifier or descriptive name - - Location and size - - Any visible text or labels - - State (focused, disabled, etc) - - Return in format: { - "elements": [{ - "type": "button|input|dropdown|etc", - "identifier": "element-id or descriptive name", - "bounds": {"x": number, "y": number, "width": number, "height": number}, - "text": "visible text", - "state": {"focused": boolean, "disabled": boolean} - }] - }` - }, - { - type: 'image_url', - image_url: { - url: `data:image/png;base64,${screenshot}` - } - } - ] - } - ], - max_tokens: 1000, - temperature: 0.3 - }); - const result = JSON.parse(response.choices[0].message.content || '{}'); - return { - elements: result.elements || [], - timestamp: Date.now() - }; - } - catch (error) { - console.error('Error in analyzeScreen:', error); - throw new Error('Failed to analyze screen'); - } - } -} -exports.OpenAIService = OpenAIService; diff --git a/dist/services/player.service.js b/dist/services/player.service.js deleted file mode 100644 index b348cad..0000000 --- a/dist/services/player.service.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.PlayerService = void 0; -const electron_1 = require("electron"); -const openai_service_1 = require("../services/openai.service"); -class PlayerService { - constructor() { - this.currentScreenshot = ''; - this.isPlaying = false; - console.log('[PlayerService] Initializing'); - this.openAIService = new openai_service_1.OpenAIService(); - } - async executeBasicCode(code) { - console.log('[PlayerService] executeBasicCode called with:', code); - this.isPlaying = true; - const lines = code.split('\n'); - try { - for (const line of lines) { - if (!this.isPlaying) - break; - if (line.trim().startsWith('REM') || line.trim() === '') - continue; - const match = line.match(/^\d+\s+(\w+)\s+"([^"]+)"(?:\s+"([^"]+)")?/); - if (!match) - continue; - const [_, command, identifier, value] = match; - console.log('[PlayerService] Executing command:', { command, identifier, value }); - await this.captureAndAnalyzeScreen(); - await this.executeCommand(command, identifier, value); - await new Promise(resolve => setTimeout(resolve, 500)); - } - } - catch (error) { - console.error('[PlayerService] Execution error:', error); - this.isPlaying = false; - throw error; - } - } - async captureAndAnalyzeScreen() { 
- console.log('[PlayerService] captureAndAnalyzeScreen called'); - const sources = await electron_1.ipcRenderer.invoke('get-screenshot'); - this.currentScreenshot = sources[0].thumbnail; - } - async executeCommand(command, identifier, value) { - console.log('[PlayerService] executeCommand called with:', { command, identifier, value }); - const element = await this.openAIService.analyzeScreenWithContext({ - screenshot: this.currentScreenshot, - transcription: '', - identifier, cursorPosition: null - }); - //@ts-nocheck - if (!element) { - console.warn(`[PlayerService] Element not found: ${identifier}, retrying with fresh analysis`); - await this.captureAndAnalyzeScreen(); - const newElement = await this.openAIService.analyzeScreenWithContext({ - screenshot: this.currentScreenshot, - transcription: '', - cursorPosition: await electron_1.ipcRenderer.invoke('get-cursor-position'), - identifier - }); - if (!newElement) - throw new Error(`Element not found after retry: ${identifier}`); - } - const centerX = element.bounds.x + element.bounds.width / 2; - const centerY = element.bounds.y + element.bounds.height / 2; - switch (command) { - case 'CLICK': - console.log('[PlayerService] Simulating click at:', { centerX, centerY }); - await electron_1.ipcRenderer.invoke('simulate-click', { x: centerX, y: centerY }); - break; - case 'TYPE': - console.log('[PlayerService] Simulating type:', { centerX, centerY, value }); - await electron_1.ipcRenderer.invoke('simulate-click', { x: centerX, y: centerY }); - await electron_1.ipcRenderer.invoke('simulate-type', { text: value || '' }); - break; - case 'MOVE': - console.log('[PlayerService] Simulating move:', { centerX, centerY }); - await electron_1.ipcRenderer.invoke('simulate-move', { x: centerX, y: centerY }); - break; - } - } - stop() { - console.log('[PlayerService] Stopping playback'); - this.isPlaying = false; - } -} -exports.PlayerService = PlayerService; diff --git a/dist/services/recorder.service.js b/dist/services/recorder.service.js deleted file mode 100644 index fddca98..0000000 --- a/dist/services/recorder.service.js +++ /dev/null @@ -1,242 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.RecorderService = void 0; -const electron_1 = require("electron"); -const openai_service_1 = require("../services/openai.service"); -const path = __importStar(require("path")); -const fs = __importStar(require("fs")); -class RecorderService { - constructor() { - this.eventGroups = []; - this.currentEvents = []; - this.recording = false; - this.currentScreenshot = ''; - this.audioBuffer = []; - this.isListeningToMicrophone = false; - this.silenceTimer = null; - this.isProcessingAudio = false; - this.SILENCE_THRESHOLD = 0.01; - this.SILENCE_DURATION = 1500; // 1.5 seconds of silence to trigger processing - this.MIN_AUDIO_DURATION = 500; // Minimum audio duration to process - this.handleAudioLevel = (_, level) => { - if (!this.recording || !this.isListeningToMicrophone) - return; - if (level < this.SILENCE_THRESHOLD) { - if (!this.silenceTimer && !this.isProcessingAudio && this.audioBuffer.length > 0) { - this.silenceTimer = setTimeout(async () => { - if (this.recording) { - await this.processCapturedAudio(); - } - }, this.SILENCE_DURATION); - } - } - else { - if (this.silenceTimer) { - clearTimeout(this.silenceTimer); - this.silenceTimer = null; - } - } - }; - this.handleAudioChunk = (_, chunk) => { - if (!this.recording || !this.isListeningToMicrophone) - return; - this.audioBuffer.push(chunk); - }; - this.handleKeyboardEvent = async (_, event) => { - if (!this.recording) - return; - this.currentEvents.push({ - type: 'type', - identifier: event.key, - value: event.key, - timestamp: Date.now(), - narration: '' - }); - }; - this.handleMouseEvent = async (_, event) => { - if (!this.recording) - return; - const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot); - const element = this.findElementAtPosition(analysis, event.clientX, event.clientY); - if (element) { - this.currentEvents.push({ - type: 'click', - identifier: element.identifier, - timestamp: Date.now(), - narration: '' - }); - } - }; - console.log('RecorderService.constructor()'); - this.openAIService = new openai_service_1.OpenAIService(); - this.tempDir = path.join(process.cwd(), 'temp_recordings'); - this.ensureTempDirectory(); - } - ensureTempDirectory() { - if (!fs.existsSync(this.tempDir)) { - fs.mkdirSync(this.tempDir, { recursive: true }); - } - } - async startRecording() { - console.log('RecorderService.startRecording()'); - try { - this.recording = true; - this.eventGroups = []; - this.currentEvents = []; - await this.startMicrophoneCapture(); - await this.captureInitialScreenshot(); - this.setupEventListeners(); - } - catch (error) { - console.error('RecorderService.startRecording() error:', error); - this.recording = false; - throw error; - } - } - getMicrophoneStream() { - if (typeof window !== 'undefined') { - //@ts-ignore - return window.getMicrophoneStream(); - } - return null; - } - async startMicrophoneCapture() { - console.log('RecorderService.startMicrophoneCapture()'); - try { - this.isListeningToMicrophone = true; - electron_1.ipcRenderer.on('audio-level', 
this.handleAudioLevel); - electron_1.ipcRenderer.on('audio-chunk', this.handleAudioChunk); - const stream = this.getMicrophoneStream(); - console.log('Got Stream'); - } - catch (error) { - console.error('Failed to start microphone capture:', error); - throw new Error(`Microphone initialization failed: ${error.message}`); - } - } - async processCapturedAudio() { - if (this.isProcessingAudio || this.audioBuffer.length === 0) - return; - this.isProcessingAudio = true; - const combinedBuffer = Buffer.concat(this.audioBuffer); - this.audioBuffer = []; // Clear the buffer - try { - const audioFilePath = path.join(this.tempDir, `audio-${Date.now()}.wav`); - fs.writeFileSync(audioFilePath, combinedBuffer); - const transcription = await this.openAIService.transcribeAudio(new Blob([combinedBuffer], { type: 'audio/wav' })); - if (transcription.text.trim()) { - await this.processNarrationWithEvents(transcription.text); - } - fs.unlinkSync(audioFilePath); - } - catch (error) { - console.error('Audio processing error:', error); - } - finally { - this.isProcessingAudio = false; - } - } - async processNarrationWithEvents(narration) { - if (this.currentEvents.length === 0) - return; - const eventGroup = { - narration, - events: [...this.currentEvents], - screenshot: this.currentScreenshot, - timestamp: Date.now() - }; - this.eventGroups.push(eventGroup); - this.currentEvents = []; // Clear current events for next group - await this.captureInitialScreenshot(); // Get fresh screenshot for next group - } - setupEventListeners() { - electron_1.ipcRenderer.on('keyboard-event', this.handleKeyboardEvent); - electron_1.ipcRenderer.on('mouse-event', this.handleMouseEvent); - } - async captureInitialScreenshot() { - const sources = await electron_1.ipcRenderer.invoke('get-screenshot'); - this.currentScreenshot = sources[0].thumbnail; - } - findElementAtPosition(analysis, x, y) { - return analysis.elements.find(element => { - const bounds = element.bounds; - return x >= bounds.x && - x <= bounds.x + bounds.width && - y >= bounds.y && - y <= bounds.y + bounds.height; - }); - } - async stopRecording() { - console.log('RecorderService.stopRecording()'); - // Process any remaining audio - if (this.audioBuffer.length > 0) { - await this.processCapturedAudio(); - } - this.cleanup(); - return this.generateBasicCode(); - } - cleanup() { - this.recording = false; - this.isListeningToMicrophone = false; - if (this.silenceTimer) { - clearTimeout(this.silenceTimer); - this.silenceTimer = null; - } - electron_1.ipcRenderer.removeListener('audio-level', this.handleAudioLevel); - electron_1.ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk); - electron_1.ipcRenderer.removeListener('keyboard-event', this.handleKeyboardEvent); - electron_1.ipcRenderer.removeListener('mouse-event', this.handleMouseEvent); - // Cleanup temp directory - fs.readdirSync(this.tempDir).forEach(file => { - fs.unlinkSync(path.join(this.tempDir, file)); - }); - } - generateBasicCode() { - let basicCode = '10 REM BotDesktop Automation Script\n'; - let lineNumber = 20; - this.eventGroups.forEach(group => { - basicCode += `${lineNumber} REM ${group.narration}\n`; - lineNumber += 10; - group.events.forEach(event => { - switch (event.type) { - case 'click': - basicCode += `${lineNumber} CLICK "${event.identifier}"\n`; - break; - case 'type': - basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`; - break; - case 'move': - basicCode += `${lineNumber} MOVE "${event.identifier}"\n`; - break; - } - lineNumber += 10; - }); - }); - 
basicCode += `${lineNumber} END\n`; - return basicCode; - } -} -exports.RecorderService = RecorderService; diff --git a/dist/services/types.js b/dist/services/types.js deleted file mode 100644 index c8ad2e5..0000000 --- a/dist/services/types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/tests/services.test.js b/dist/tests/services.test.js deleted file mode 100644 index 59bb45a..0000000 --- a/dist/tests/services.test.js +++ /dev/null @@ -1 +0,0 @@ -// Tests for services diff --git a/src/components/App.tsx b/src/components/App.tsx index d61edf8..37b393a 100644 --- a/src/components/App.tsx +++ b/src/components/App.tsx @@ -2,8 +2,8 @@ import React, { useState } from 'react'; import { RecorderService } from '../services/recorder.service'; import { PlayerService } from '../services/player.service'; -const recorder = new RecorderService(); -const player = new PlayerService(); +const recorder = new RecorderService(window); +const player = new PlayerService(window); const App: React.FC = () => { const [recording, setRecording] = useState(false); diff --git a/src/main/main.ts b/src/main/main.ts index 47203f4..a1e33d7 100644 --- a/src/main/main.ts +++ b/src/main/main.ts @@ -1,9 +1,9 @@ +//@ts-nocheck + require('dotenv').config(); require('electron-require'); -import { app } from 'electron'; -import { BrowserWindow, desktopCapturer, ipcMain } from 'electron'; +import { app, BrowserWindow, desktopCapturer, ipcMain, systemPreferences } from 'electron'; import * as path from 'path'; -import { systemPreferences } from 'electron'; import { RecorderService } from '../services/recorder.service'; import { PlayerService } from '../services/player.service'; @@ -20,11 +20,19 @@ const audioCapture: AudioCapture = { audioStream: null, analyserNode: null, audioData: null, - isCapturing: false + isCapturing: false, }; -const recorder = new RecorderService(); -const player = new PlayerService(); +let recorder: RecorderService; +let player: PlayerService; + +function setup() { + // Perform any necessary setup here + const envSetup = require('dotenv').config(); + if (envSetup.error) { + throw envSetup.error; + } +} function createWindow() { const mainWindow = new BrowserWindow({ @@ -34,201 +42,102 @@ function createWindow() { center: true, maximizable: false, thickFrame: true, - autoHideMenuBar:true, + autoHideMenuBar: true, webPreferences: { experimentalFeatures: true, - nodeIntegrationInWorker: true, - nodeIntegration: true, - nodeIntegrationInSubFrames: true, - contextIsolation: false, - preload: path.join(__dirname, '../preload/preload.js') - } + nodeIntegration: false, + contextIsolation: true, + preload: path.join(__dirname, '../preload/preload.js'), + }, }); - - ipcMain.handle('request-microphone', async () => { - try { - const stream = await mainWindow.webContents.executeJavaScript(` - (async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - return stream; - } catch (error) { - console.error('Error accessing microphone:', error); - throw error; - } - })(); - `); - return stream; // Return the stream to the UserService - } catch (error) { - console.error('Failed to get microphone stream:', error); - throw error; - } -}); - - - mainWindow.setAutoHideMenuBar(true); - mainWindow. 
setMaximizable(false); - - if (process.env.NODE_ENV === 'development') { mainWindow.loadURL('http://localhost:8080'); mainWindow.webContents.openDevTools(); } else { mainWindow.loadFile(path.join(__dirname, '../../src/renderer/index.html')); - } ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder)); - - - ipcMain.handle('request-microphone', async () => { - try { - const stream = await mainWindow.webContents.executeJavaScript(` - (async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - return stream; - } catch (error) { - console.error('Error accessing microphone:', error); - throw error; - } - })(); - `); - return stream; // Return the stream to the UserService - } catch (error) { - console.error('Failed to get microphone stream:', error); - throw error; - } -}); - + } + + recorder = new RecorderService(mainWindow); + player = new PlayerService(mainWindow); + ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder)); ipcMain.handle('keyboard-event', recorder.handleKeyboardEvent.bind(recorder)); - - // Handler to capture the entire screen - ipcMain.handle('get-screenshot', async () => { - console.log('get-screenshot called'); - const sources = await desktopCapturer.getSources({ types: ['screen'] }); - const screenSource = sources[0]; // Get the first screen source - - const { thumbnail } = screenSource; // Thumbnail is a native image - return thumbnail.toPNG(); // Return the screenshot as PNG buffer - }); - - ipcMain.handle('start-recording', async () => { - console.log('start-recording called'); - await recorder.startRecording(); - }); - - ipcMain.handle('stop-recording', async () => { - console.log('stop-recording called'); - return await recorder.stopRecording(); - }); - - ipcMain.handle('execute-basic-code', async (_, code: string) => { - console.log('execute-basic-code called with:', code); - await player.executeBasicCode(code); - }); - - ipcMain.handle('check-microphone-permission', async () => { - console.log('check-microphone-permission called'); - if (process.platform === 'darwin') { - const status = await systemPreferences.getMediaAccessStatus('microphone'); - if (status !== 'granted') { - const success = await systemPreferences.askForMediaAccess('microphone'); - return success; - } - return true; - } - return true; // On Windows/Linux, permissions are handled by the OS - }); - - - ipcMain.handle('start-microphone-capture', async (event) => { - - const window = BrowserWindow.fromWebContents(event.sender); - if (!window) { - throw new Error('No window found for this request'); - } - return startMicrophoneCapture(window); - }); - - ipcMain.handle('stop-microphone-capture', async (event) => { - const window = BrowserWindow.fromWebContents(event.sender); - if (!window) { - throw new Error('No window found for this request'); - } - return stopMicrophoneCapture(window); - }); - - ipcMain.handle('start-microphone-capture', async (event, ...args) => { - // Perform asynchronous microphone capture logic here - try { - const result = await startMicrophoneCapture(args[0]); // Assuming this function is async - return result; - } catch (error) { - console.error("Error during microphone capture:", error); - throw error; // Send the error back to the renderer - } - }); - ipcMain.handle('stop-microphone-capture', async (event, ...args) => { - try { - const result = await stopMicrophoneCapture(args[0]); - return result; - } catch (error) { - console.error("Error stopping microphone capture:", error); - throw error; // 
Send the error back to the renderer
-    }
-  });
-}
-app.whenReady().then(createWindow);
-
-app.on('window-all-closed', () => {
-  if (process.platform !== 'darwin') {
-    app.quit();
-  }
-});
-
-app.on('activate', () => {
-  if (BrowserWindow.getAllWindows().length === 0) {
-    createWindow();
-  }
-});
-
-// Enable required permissions
-app.commandLine.appendSwitch('enable-speech-dispatcher');
+setupIPC();
-// Register cleanup on app quit
-app.on('will-quit', cleanupAudioCapture);
+function setupIPC() {
+  ipcMain.handle('start-recording', startRecording);
+  ipcMain.handle('stop-recording', stopRecording);
+  ipcMain.handle('execute-basic-code', executeBasicCode);
+  ipcMain.handle('check-microphone-permission', checkMicrophonePermission);
+  ipcMain.handle('start-microphone-capture', (event) => handleMicrophoneCapture(event, true));
+  ipcMain.handle('stop-microphone-capture', (event) => handleMicrophoneCapture(event, false));
-// Function to get the focused window or first available window
-function getFocusedWindow(): BrowserWindow | null {
-  const focusedWindow = BrowserWindow.getFocusedWindow();
-  if (focusedWindow) return focusedWindow;
-
-  const windows = BrowserWindow.getAllWindows();
-  return windows.length > 0 ? windows[0] : null;
+  ipcMain.handle('get-screenshot', (event) => captureScreenshot(event));
 }
-// Function to safely send to window
-function sendToWindow(channel: string, ...args: any[]) {
-  const window = getFocusedWindow();
-  if (window && !window.isDestroyed()) {
-    window.webContents.send(channel, ...args);
-  }
+async function startRecording() {
+  console.log('start-recording called');
+  await recorder.startRecording();
 }
-async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
+async function stopRecording() {
+  console.log('stop-recording called');
+  return await recorder.stopRecording();
+}
+
+async function executeBasicCode(_, code: string) {
+  console.log('execute-basic-code called with:', code);
+  await player.executeBasicCode(code);
+}
+
+async function checkMicrophonePermission() {
+  console.log('check-microphone-permission called');
+  if (process.platform === 'darwin') {
+    const status = await systemPreferences.getMediaAccessStatus('microphone');
+    if (status !== 'granted') {
+      return await systemPreferences.askForMediaAccess('microphone');
+    }
+    return true;
+  }
+  return true; // On Windows/Linux, permissions are handled by the OS
+}
+
+async function handleMicrophoneCapture(event: Electron.IpcMainEvent, isStart: boolean) {
+  const window = BrowserWindow.fromWebContents(event.sender);
+  if (!window) {
+    throw new Error('No window found for this request');
+  }
+  return isStart ? startMicrophoneCapture(window) : stopMicrophoneCapture(window);
+}
+
+async function captureScreenshot(event) {
+  console.log('get-screenshot called');
+  const sources = await desktopCapturer.getSources({ types: ['screen'] });
+  // The main process has no DOM, so return the capture to the caller
+  // instead of touching document here; the renderer decides how to display it.
+  return sources[0].thumbnail.toPNG();
+}
+
+async function startMicrophoneCapture(window: any): Promise<void> {
   console.log('Starting microphone capture...');
-  try {
-    navigator.mediaDevices;
-    // Request microphone access
-    //@ts-ignore
-    const stream = await window.myApi.startMicrophone()
-
+    const stream = await window.webContents.executeJavaScript(`
+      (async () => {
+        try {
+          const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+          return stream;
+        } catch (error) {
+          console.error('Error accessing microphone:', error);
+          throw error;
+        }
+      })();
+    `);
+
     audioCapture.audioStream = stream;
 
     // Set up audio analysis
@@ -242,13 +151,11 @@ async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
     // Set up MediaRecorder
     audioCapture.mediaRecorder = new MediaRecorder(stream, {
-      mimeType: 'audio/webm;codecs=opus'
+      mimeType: 'audio/webm;codecs=opus',
     });
 
-    // Handle audio data
     audioCapture.mediaRecorder.ondataavailable = (event: BlobEvent) => {
       if (event.data.size > 0 && !window.isDestroyed()) {
-        // Convert blob to buffer and send to renderer
         const reader = new FileReader();
         reader.onloadend = () => {
           const buffer = Buffer.from(reader.result as ArrayBuffer);
@@ -258,13 +165,11 @@ async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
       }
     };
 
-    // Start recording
     audioCapture.mediaRecorder.start(1000); // Capture in 1-second chunks
     audioCapture.isCapturing = true;
 
     // Start audio level monitoring
     monitorAudioLevels(window);
-
     console.log('Microphone capture started successfully');
   } catch (error) {
     console.error('Failed to start microphone capture:', error);
@@ -277,26 +182,18 @@ function monitorAudioLevels(window: BrowserWindow) {
     return;
   }
 
-  // Get audio level data
   audioCapture.analyserNode.getByteFrequencyData(audioCapture.audioData);
+  const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) / audioCapture.audioData.length / 255;
 
-  // Calculate average volume level (0-1)
-  const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) /
-    audioCapture.audioData.length /
-    255;
-
-  // Send level to renderer
   if (!window.isDestroyed()) {
     window.webContents.send('audio-level', average);
   }
 
-  // Continue monitoring
   requestAnimationFrame(() => monitorAudioLevels(window));
 }
 
 function stopMicrophoneCapture(window: BrowserWindow) {
   console.log('Stopping microphone capture...');
-
   try {
     if (audioCapture.mediaRecorder && audioCapture.mediaRecorder.state !== 'inactive') {
       audioCapture.mediaRecorder.stop();
@@ -327,21 +224,40 @@ function stopMicrophoneCapture(window: BrowserWindow) {
   }
 }
 
-// Error handler for audio processing
-function handleAudioError(error: Error, window: BrowserWindow): void {
-  console.error('Audio processing error:', error);
-  stopMicrophoneCapture(window);
-
-  // Notify renderer of error if window still exists
-  if (!window.isDestroyed()) {
-    window.webContents.send('audio-error', error.message);
-  }
-}
-
-// Clean up resources when app is closing
-export function cleanupAudioCapture(): void {
+// Cleanup when app quits
+function cleanupAudioCapture(): void {
   const window = getFocusedWindow();
   if (window) {
     stopMicrophoneCapture(window);
   }
 }
+
+function getFocusedWindow(): BrowserWindow | null {
+  const focusedWindow = BrowserWindow.getFocusedWindow();
+  if (focusedWindow) return focusedWindow;
+
+  const windows = BrowserWindow.getAllWindows();
+  return windows.length > 0 ? windows[0] : null;
+}
+
+// Setup the environment before creating the window
+setup();
+
+app.whenReady().then(createWindow);
+app.on('window-all-closed', () => {
+  if (process.platform !== 'darwin') {
+    app.quit();
+  }
+});
+
+app.on('activate', () => {
+  if (BrowserWindow.getAllWindows().length === 0) {
+    createWindow();
+  }
+});
+
+// Enable required permissions
+app.commandLine.appendSwitch('enable-speech-dispatcher');
+
+// Register cleanup on app quit
+app.on('will-quit', cleanupAudioCapture);
diff --git a/src/preload/preload.ts b/src/preload/preload.ts
index 049864f..11b9ebe 100644
--- a/src/preload/preload.ts
+++ b/src/preload/preload.ts
@@ -1,26 +1,16 @@
+//@ts-nocheck
+
 const { ipcRenderer } = require('electron');
 const { contextBridge } = require('electron');
-const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
-
-// Initialize IPC listeners for microphone access
-ipcRenderer.on('request-microphone', async () => {
-
-  if (navigator.mediaDevices) {
-    return navigator.mediaDevices.getUserMedia({ audio: true });
-  } else {
-    console.error("MediaDevices API not supported");
-  }
-
-  // Send the microphone stream back to the renderer
-  //event.sender.send('microphone-stream', stream);
-});
-
-//@ts-nocheck
-(window as any).myApi = {
+contextBridge.exposeInMainWorld('myAPI', {
+  send: (channel, data) => ipcRenderer.send(channel, data),
+  on: (channel, func) => {
+    ipcRenderer.on(channel, (event, ...args) => func(...args));
+  },
   startMicrophone: () => {
-    alert(1);
+    // Was a placeholder alert; actually request the microphone stream here.
+    return navigator.mediaDevices.getUserMedia({ audio: true });
   },
   sendMessage: (message: any) => {
     console.log('[preload] sendMessage called with:', message);
@@ -29,6 +19,5 @@ ipcRenderer.on('request-microphone', async () => {
   receiveMessage: (callback: any) => {
     console.log('[preload] receiveMessage registered with callback');
     return ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
-  },
-};
-
+  }
+});
\ No newline at end of file
diff --git a/src/renderer/index.html b/src/renderer/index.html
index 9a7b1bf..3b8aa27 100644
--- a/src/renderer/index.html
+++ b/src/renderer/index.html
@@ -6,51 +6,137 @@
     General Bots Desktop
 [markup elided: the HTML body of this hunk did not survive extraction]
diff --git a/src/services/player.service.ts b/src/services/player.service.ts
index 09ace25..e1ce413 100644
--- a/src/services/player.service.ts
+++ b/src/services/player.service.ts
@@ -15,8 +15,10 @@ export class PlayerService {
   private openAIService: OpenAIService;
   private currentScreenshot: string = '';
   private isPlaying: boolean = false;
+  window: any;
 
-  constructor() {
+  constructor(window: any) {
+    this.window = window;
     console.log('[PlayerService] Initializing');
     this.openAIService = new OpenAIService();
   }
diff --git a/src/services/recorder.service.ts b/src/services/recorder.service.ts
index 175712a..820e064 100644
--- a/src/services/recorder.service.ts
+++ b/src/services/recorder.service.ts
@@ -1,4 +1,4 @@
-import { ipcRenderer } from 'electron';
+import { desktopCapturer, ipcMain, ipcRenderer } from 'electron';
 import { AutomationEvent, EventGroup, ScreenAnalysis, WhisperResponse } from '../services/types';
 import { OpenAIService } from '../services/openai.service';
 import * as path from 'path';
@@ -18,8 +18,10 @@ export class RecorderService {
   private SILENCE_THRESHOLD = 0.01;
   private SILENCE_DURATION = 1500; // 1.5 seconds of silence to trigger processing
   private MIN_AUDIO_DURATION = 500; // Minimum audio duration to process
+  window: any;
 
-  constructor() {
+  constructor(window: any) {
+    this.window = window;
     console.log('RecorderService.constructor()');
     this.openAIService = new OpenAIService();
     this.tempDir = path.join(process.cwd(), 'temp_recordings');
@@ -39,7 +41,9 @@ export class RecorderService {
       this.eventGroups = [];
       this.currentEvents = [];
       await this.startMicrophoneCapture();
-      await this.captureInitialScreenshot();
+      //@ts-ignore
+      const screen = await ipcRenderer.invoke('get-screenshot');
+      console.log(screen);
       this.setupEventListeners();
     } catch (error) {
       console.error('RecorderService.startRecording() error:', error);
@@ -48,22 +52,22 @@
     }
   }
 
-  getMicrophoneStream(): MediaStream | null {
+  getStream(): MediaStream | null {
     if (typeof window !== 'undefined') {
       //@ts-ignore
-      return window.getMicrophoneStream();
+      return window.screenStream;
     }
     return null;
-}
+  }
+
   private async startMicrophoneCapture() {
     console.log('RecorderService.startMicrophoneCapture()');
     try {
       this.isListeningToMicrophone = true;
       ipcRenderer.on('audio-level', this.handleAudioLevel);
       ipcRenderer.on('audio-chunk', this.handleAudioChunk);
-      const stream = this.getMicrophoneStream();
+      await ipcRenderer.invoke('start-microphone-capture');
-      console.log('Got Stream');
     } catch (error) {
       console.error('Failed to start microphone capture:', error);
@@ -72,6 +76,7 @@ export class RecorderService {
   }
 
   public handleAudioLevel = (_: any, level: number) => {
+    console.log('handleAudioLevel');
     if (!this.recording || !this.isListeningToMicrophone) return;
 
     if (level < this.SILENCE_THRESHOLD) {
@@ -91,6 +96,7 @@ export class RecorderService {
   }
 
   public handleAudioChunk = (_: any, chunk: Buffer) => {
+    console.log('handleAudioChunk');
     if (!this.recording || !this.isListeningToMicrophone) return;
     this.audioBuffer.push(chunk);
   }
@@ -134,7 +140,8 @@ export class RecorderService {
     this.eventGroups.push(eventGroup);
     this.currentEvents = []; // Clear current events for next group
-    await this.captureInitialScreenshot(); // Get fresh screenshot for next group
+    //@ts-ignore
+    await window.getScreenshot(); // Get fresh screenshot for next group
   }
 
   private setupEventListeners() {
@@ -142,10 +149,6 @@ export class RecorderService {
     ipcRenderer.on('mouse-event', this.handleMouseEvent);
   }
 
-  private async captureInitialScreenshot() {
-    const sources = await ipcRenderer.invoke('get-screenshot');
-    this.currentScreenshot = sources[0].thumbnail;
-  }
   public handleKeyboardEvent = async (_: any, event: KeyboardEvent) => {
     if (!this.recording) return;
diff --git a/src/types/global.d.ts b/src/types/global.d.ts
new file mode 100644
index 0000000..d9eb6d5
--- /dev/null
+++ b/src/types/global.d.ts
@@ -0,0 +1,7 @@
+// types/global.d.ts
+declare global {
+  interface Window {
+    screenStream: MediaStream | null;
+  }
+}
+export {};
diff --git a/tsconfig.json b/tsconfig.json
index dc3a8d5..93de1f3 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -12,6 +12,7 @@
     "rootDir": "src",
     "moduleResolution": "node",
     "resolveJsonModule": true,
+    "baseUrl": ".",
     "paths": {
       "@/*": ["src/*"]
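
For reference, a minimal renderer-side sketch of how code could call the bridge that src/preload/preload.ts now exposes under contextIsolation. This is an illustrative assumption rather than part of the change set: the myAPI shape and the 'message-from-renderer' / 'message-from-main' channels come from the preload diff above, while the Window augmentation and the ping payload below are hypothetical.

// Renderer-side usage sketch (hypothetical; not part of this diff).
// Assumes preload.ts exposed `myAPI` via contextBridge as shown above.
declare global {
  interface Window {
    myAPI: {
      send: (channel: string, data: unknown) => void;
      on: (channel: string, func: (...args: unknown[]) => void) => void;
      startMicrophone: () => Promise<MediaStream>;
      sendMessage: (message: unknown) => void;
      receiveMessage: (callback: (arg: unknown) => void) => void;
    };
  }
}

// Register a listener for replies from the main process, then send it a message.
window.myAPI.receiveMessage((arg) => {
  console.log('[renderer] message-from-main:', arg);
});
window.myAPI.sendMessage({ type: 'ping' }); // hypothetical payload

export {};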