new (all): Screenshot.

me@rodrigorodriguez.com 2024-10-27 19:57:03 -03:00
parent 0905970207
commit 53a75697c7
17 changed files with 282 additions and 1188 deletions


@@ -1,75 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const react_1 = __importStar(require("react"));
const recorder_service_1 = require("../services/recorder.service");
const player_service_1 = require("../services/player.service");
const recorder = new recorder_service_1.RecorderService();
const player = new player_service_1.PlayerService();
const App = () => {
const [recording, setRecording] = (0, react_1.useState)(false);
const [basicCode, setBasicCode] = (0, react_1.useState)('');
const handleStartRecording = async () => {
setRecording(true);
await recorder.startRecording();
};
const handleStopRecording = async () => {
//@ts-ignore
if (window.microphone) {
//@ts-ignore
window.stopMicrophone();
console.log('Microphone stopped');
}
setRecording(false);
const code = await recorder.stopRecording();
setBasicCode(code);
// Save to file
const blob = new Blob([code], { type: 'text/plain' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = 'automation.bas';
a.click();
};
const handlePlayback = async () => {
try {
await player.executeBasicCode(basicCode);
}
catch (error) {
console.error('Playback error:', error);
}
};
return (react_1.default.createElement("div", { className: "p-4 h-auto" },
react_1.default.createElement("h1", { className: "text-2xl font-bold mb-4" }, "General Bots Desktop"),
react_1.default.createElement("div", { className: "space-x-4 mb-4 h-auto" },
react_1.default.createElement("button", { id: "startBtn", className: `px-4 py-2 rounded ${recording ? 'bg-red-500' : 'bg-blue-500'} text-white`, onClick: recording ? handleStopRecording : handleStartRecording }, recording ? 'Stop Recording' : 'Start Recording'),
react_1.default.createElement("button", { id: "stopBtn", className: "px-4 py-2 rounded bg-green-500 text-white", onClick: handlePlayback, disabled: !basicCode }, "Play Recording")),
react_1.default.createElement("div", { className: "mt-4 h-20" },
react_1.default.createElement("h2", { className: "text-xl font-bold mb-2" }, "Generated BASIC Code:"),
react_1.default.createElement("pre", { className: "h-20 min-h-100 bg-gray-100 p-2 rounded border" }, basicCode)),
react_1.default.createElement("div", { className: "mb-4" },
react_1.default.createElement("a", { href: "https://github.com/General Bots" }, "General Bots"))));
};
exports.default = App;
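The component above drives recording through in-renderer service instances, while the main process registers the same operations as IPC handlers ('start-recording', 'stop-recording', 'execute-basic-code'). A minimal sketch of driving that flow over IPC instead, assuming the pre-refactor build where nodeIntegration exposes ipcRenderer to the renderer; recordAndReplay is an illustrative helper, not part of this codebase:

import { ipcRenderer } from 'electron';

async function recordAndReplay(): Promise<void> {
    await ipcRenderer.invoke('start-recording');
    // ...the user performs the actions to be captured...
    const basicCode: string = await ipcRenderer.invoke('stop-recording');
    await ipcRenderer.invoke('execute-basic-code', basicCode);
}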

313 dist/main/main.js vendored

@@ -1,313 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.cleanupAudioCapture = cleanupAudioCapture;
require('dotenv').config();
require('electron-require');
const electron_1 = require("electron");
const electron_2 = require("electron");
const path = __importStar(require("path"));
const electron_3 = require("electron");
const recorder_service_1 = require("../services/recorder.service");
const player_service_1 = require("../services/player.service");
const audioCapture = {
mediaRecorder: null,
audioStream: null,
analyserNode: null,
audioData: null,
isCapturing: false
};
const recorder = new recorder_service_1.RecorderService();
const player = new player_service_1.PlayerService();
function createWindow() {
const mainWindow = new electron_2.BrowserWindow({
width: 700,
height: 500,
backgroundColor: "grey",
center: true,
maximizable: false,
thickFrame: true,
autoHideMenuBar: true,
webPreferences: {
experimentalFeatures: true,
nodeIntegrationInWorker: true,
nodeIntegration: true,
nodeIntegrationInSubFrames: true,
contextIsolation: false,
preload: path.join(__dirname, '../preload/preload.js')
}
});
electron_2.ipcMain.handle('request-microphone', async () => {
try {
const stream = await mainWindow.webContents.executeJavaScript(`
(async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
return stream;
} catch (error) {
console.error('Error accessing microphone:', error);
throw error;
}
})();
`);
return stream; // Return the stream to the UserService
}
catch (error) {
console.error('Failed to get microphone stream:', error);
throw error;
}
});
mainWindow.setAutoHideMenuBar(true);
mainWindow.setMaximizable(false);
if (process.env.NODE_ENV === 'development') {
mainWindow.loadURL('http://localhost:8080');
mainWindow.webContents.openDevTools();
}
else {
mainWindow.loadFile(path.join(__dirname, '../../src/renderer/index.html'));
}
electron_2.ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder));
electron_2.ipcMain.handle('request-microphone', async () => {
try {
const stream = await mainWindow.webContents.executeJavaScript(`
(async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
return stream;
} catch (error) {
console.error('Error accessing microphone:', error);
throw error;
}
})();
`);
return stream; // Return the stream to the UserService
}
catch (error) {
console.error('Failed to get microphone stream:', error);
throw error;
}
});
electron_2.ipcMain.handle('keyboard-event', recorder.handleKeyboardEvent.bind(recorder));
// Handler to capture the entire screen
electron_2.ipcMain.handle('get-screenshot', async () => {
console.log('get-screenshot called');
const sources = await electron_2.desktopCapturer.getSources({ types: ['screen'] });
const screenSource = sources[0]; // Get the first screen source
const { thumbnail } = screenSource; // Thumbnail is a native image
return thumbnail.toPNG(); // Return the screenshot as PNG buffer
});
electron_2.ipcMain.handle('start-recording', async () => {
console.log('start-recording called');
await recorder.startRecording();
});
electron_2.ipcMain.handle('stop-recording', async () => {
console.log('stop-recording called');
return await recorder.stopRecording();
});
electron_2.ipcMain.handle('execute-basic-code', async (_, code) => {
console.log('execute-basic-code called with:', code);
await player.executeBasicCode(code);
});
electron_2.ipcMain.handle('check-microphone-permission', async () => {
console.log('check-microphone-permission called');
if (process.platform === 'darwin') {
const status = await electron_3.systemPreferences.getMediaAccessStatus('microphone');
if (status !== 'granted') {
const success = await electron_3.systemPreferences.askForMediaAccess('microphone');
return success;
}
return true;
}
return true; // On Windows/Linux, permissions are handled by the OS
});
electron_2.ipcMain.handle('start-microphone-capture', async (event) => {
const window = electron_2.BrowserWindow.fromWebContents(event.sender);
if (!window) {
throw new Error('No window found for this request');
}
return startMicrophoneCapture(window);
});
electron_2.ipcMain.handle('stop-microphone-capture', async (event) => {
const window = electron_2.BrowserWindow.fromWebContents(event.sender);
if (!window) {
throw new Error('No window found for this request');
}
return stopMicrophoneCapture(window);
});
electron_2.ipcMain.handle('start-microphone-capture', async (event, ...args) => {
// Perform asynchronous microphone capture logic here
try {
const result = await startMicrophoneCapture(args[0]); // Assuming this function is async
return result;
}
catch (error) {
console.error("Error during microphone capture:", error);
throw error; // Send the error back to the renderer
}
});
electron_2.ipcMain.handle('stop-microphone-capture', async (event, ...args) => {
try {
const result = await stopMicrophoneCapture(args[0]);
return result;
}
catch (error) {
console.error("Error stopping microphone capture:", error);
throw error; // Send the error back to the renderer
}
});
}
electron_1.app.whenReady().then(createWindow);
electron_1.app.on('window-all-closed', () => {
if (process.platform !== 'darwin') {
electron_1.app.quit();
}
});
electron_1.app.on('activate', () => {
if (electron_2.BrowserWindow.getAllWindows().length === 0) {
createWindow();
}
});
// Enable required permissions
electron_1.app.commandLine.appendSwitch('enable-speech-dispatcher');
// Register cleanup on app quit
electron_1.app.on('will-quit', cleanupAudioCapture);
// Function to get the focused window or first available window
function getFocusedWindow() {
const focusedWindow = electron_2.BrowserWindow.getFocusedWindow();
if (focusedWindow)
return focusedWindow;
const windows = electron_2.BrowserWindow.getAllWindows();
return windows.length > 0 ? windows[0] : null;
}
// Function to safely send to window
function sendToWindow(channel, ...args) {
const window = getFocusedWindow();
if (window && !window.isDestroyed()) {
window.webContents.send(channel, ...args);
}
}
async function startMicrophoneCapture(window) {
console.log('Starting microphone capture...');
try {
navigator.mediaDevices;
// Request microphone access
//@ts-ignore
const stream = await window.myApi.startMicrophone();
audioCapture.audioStream = stream;
// Set up audio analysis
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
const sourceNode = audioContext.createMediaStreamSource(stream);
audioCapture.analyserNode = audioContext.createAnalyser();
audioCapture.analyserNode.fftSize = 2048;
sourceNode.connect(audioCapture.analyserNode);
audioCapture.audioData = new Uint8Array(audioCapture.analyserNode.frequencyBinCount);
// Set up MediaRecorder
audioCapture.mediaRecorder = new MediaRecorder(stream, {
mimeType: 'audio/webm;codecs=opus'
});
// Handle audio data
audioCapture.mediaRecorder.ondataavailable = (event) => {
if (event.data.size > 0 && !window.isDestroyed()) {
// Convert blob to buffer and send to renderer
const reader = new FileReader();
reader.onloadend = () => {
const buffer = Buffer.from(reader.result);
window.webContents.send('audio-chunk', buffer);
};
reader.readAsArrayBuffer(event.data);
}
};
// Start recording
audioCapture.mediaRecorder.start(1000); // Capture in 1-second chunks
audioCapture.isCapturing = true;
// Start audio level monitoring
monitorAudioLevels(window);
console.log('Microphone capture started successfully');
}
catch (error) {
console.error('Failed to start microphone capture:', error);
throw error;
}
}
function monitorAudioLevels(window) {
if (!audioCapture.isCapturing || !audioCapture.analyserNode || !audioCapture.audioData || window.isDestroyed()) {
return;
}
// Get audio level data
audioCapture.analyserNode.getByteFrequencyData(audioCapture.audioData);
// Calculate average volume level (0-1)
const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) /
audioCapture.audioData.length /
255;
// Send level to renderer
if (!window.isDestroyed()) {
window.webContents.send('audio-level', average);
}
// Continue monitoring
requestAnimationFrame(() => monitorAudioLevels(window));
}
function stopMicrophoneCapture(window) {
console.log('Stopping microphone capture...');
try {
if (audioCapture.mediaRecorder && audioCapture.mediaRecorder.state !== 'inactive') {
audioCapture.mediaRecorder.stop();
}
if (audioCapture.audioStream) {
audioCapture.audioStream.getTracks().forEach(track => track.stop());
}
if (audioCapture.analyserNode) {
audioCapture.analyserNode.disconnect();
}
audioCapture.isCapturing = false;
audioCapture.mediaRecorder = null;
audioCapture.audioStream = null;
audioCapture.analyserNode = null;
audioCapture.audioData = null;
if (!window.isDestroyed()) {
window.webContents.send('microphone-stopped');
}
console.log('Microphone capture stopped successfully');
}
catch (error) {
console.error('Failed to stop microphone capture:', error);
throw error;
}
}
// Error handler for audio processing
function handleAudioError(error, window) {
console.error('Audio processing error:', error);
stopMicrophoneCapture(window);
// Notify renderer of error if window still exists
if (!window.isDestroyed()) {
window.webContents.send('audio-error', error.message);
}
}
// Clean up resources when app is closing
function cleanupAudioCapture() {
const window = getFocusedWindow();
if (window) {
stopMicrophoneCapture(window);
}
}
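For reference, the level math in monitorAudioLevels above reduces to a single normalization step: each frequency bin holds a 0-255 byte, and the average is divided by 255 to give the 0-1 value sent on the 'audio-level' channel. A standalone sketch of just that step (the helper name is illustrative):

function averageLevel(analyser: AnalyserNode, data: Uint8Array): number {
    analyser.getByteFrequencyData(data); // fills data with one 0-255 value per frequency bin
    return data.reduce((acc, v) => acc + v, 0) / data.length / 255;
}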


@@ -1,28 +0,0 @@
const { ipcRenderer } = require('electron');
const { contextBridge } = require('electron');
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
// Initialize IPC listeners for microphone access
ipcRenderer.on('request-microphone', async () => {
if (navigator.mediaDevices) {
return navigator.mediaDevices.getUserMedia({ audio: true });
}
else {
console.error("MediaDevices API not supported");
}
// Send the microphone stream back to the renderer
//event.sender.send('microphone-stream', stream);
});
//@ts-nocheck
window.myApi = {
startMicrophone: () => {
alert(1);
},
sendMessage: (message) => {
console.log('[preload] sendMessage called with:', message);
return ipcRenderer.send('message-from-renderer', message);
},
receiveMessage: (callback) => {
console.log('[preload] receiveMessage registered with callback');
return ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
},
};


@@ -1,11 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const react_1 = __importDefault(require("react"));
const client_1 = __importDefault(require("react-dom/client"));
const App_1 = __importDefault(require("../components/App"));
console.log('[renderer] Initializing React app');
client_1.default.createRoot(document.getElementById('root')).render(react_1.default.createElement(react_1.default.StrictMode, null,
react_1.default.createElement(App_1.default, null)));


@@ -1,150 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIService = void 0;
const openai_1 = require("openai");
const { Readable } = require('stream');
class OpenAIService {
constructor() {
this.client = new openai_1.AzureOpenAI({
dangerouslyAllowBrowser: true,
endpoint: process.env.AZURE_OPEN_AI_ENDPOINT || '',
apiVersion: process.env.OPENAI_API_VERSION || '2024-02-15-preview',
apiKey: process.env.AZURE_OPEN_AI_KEY || ''
});
}
async transcribeAudio(audioBlob) {
try {
// Convert Blob to ArrayBuffer
const arrayBuffer = await audioBlob.arrayBuffer();
// Convert Buffer to a Readable stream
const buffer = Buffer.from(arrayBuffer);
const stream = new Readable();
stream.push(buffer);
stream.push(null); // Signal the end of the stream
const response = await this.client.audio.transcriptions.create({
file: stream,
model: process.env.AZURE_OPEN_AI_WHISPER_MODEL || 'whisper-1',
language: 'en',
response_format: 'verbose_json'
});
return {
text: response.text,
//@ts-ignore
segments: response.segments?.map(seg => ({
text: seg.text,
start: seg.start,
end: seg.end
})) || []
};
}
catch (error) {
console.error('Error in transcribeAudio:', error);
throw new Error('Failed to transcribe audio');
}
}
async analyzeScreenWithContext(context) {
try {
const response = await this.client.chat.completions.create({
model: process.env.AZURE_OPEN_AI_VISION_MODEL || '',
messages: [
{
role: 'system',
content: `You are an AI that analyzes screenshots and voice commands to determine user intentions for automation.
You should identify UI elements and return specific actions in JSON format.
Focus on the area near the field ${context.identifier}.`
},
{
role: 'user',
content: [
{
type: 'text',
text: `Analyze this screenshot with the following context:
Voice Command: "${context.transcription}"
Cursor Position: x=${context.cursorPosition.x}, y=${context.cursorPosition.y}
Identify the most likely action based on the voice command and cursor position.
Return in format: {
"type": "click|type|move",
"identifier": "element-id or descriptive name",
"value": "text to type (for type actions)",
"confidence": 0-1,
"bounds": {"x": number, "y": number, "width": number, "height": number}
}`
},
{
type: 'image_url',
image_url: {
url: `data:image/png;base64,${context.screenshot}`
}
}
]
}
],
max_tokens: 500,
temperature: 0.3
});
const result = JSON.parse(response.choices[0].message.content || '{}');
return result;
}
catch (error) {
console.error('Error in analyzeScreenWithContext:', error);
throw new Error('Failed to analyze screen context');
}
}
async analyzeScreen(screenshot) {
try {
const response = await this.client.chat.completions.create({
model: process.env.AZURE_OPEN_AI_VISION_MODEL || '',
messages: [
{
role: 'system',
content: 'You are an AI that analyzes screenshots to identify interactive UI elements and their properties.'
},
{
role: 'user',
content: [
{
type: 'text',
text: `Analyze this screenshot and identify all interactive elements (buttons, text fields, dropdowns, etc).
For each element, provide:
- Type of element
- Identifier or descriptive name
- Location and size
- Any visible text or labels
- State (focused, disabled, etc)
Return in format: {
"elements": [{
"type": "button|input|dropdown|etc",
"identifier": "element-id or descriptive name",
"bounds": {"x": number, "y": number, "width": number, "height": number},
"text": "visible text",
"state": {"focused": boolean, "disabled": boolean}
}]
}`
},
{
type: 'image_url',
image_url: {
url: `data:image/png;base64,${screenshot}`
}
}
]
}
],
max_tokens: 1000,
temperature: 0.3
});
const result = JSON.parse(response.choices[0].message.content || '{}');
return {
elements: result.elements || [],
timestamp: Date.now()
};
}
catch (error) {
console.error('Error in analyzeScreen:', error);
throw new Error('Failed to analyze screen');
}
}
}
exports.OpenAIService = OpenAIService;
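A hedged usage sketch for the service above: the constructor pulls its Azure credentials from the environment variables shown, and analyzeScreen expects a base64-encoded PNG string. The import path and the describeScreen wrapper are assumptions for illustration:

import { OpenAIService } from '../services/openai.service'; // path is an assumption

async function describeScreen(base64Png: string): Promise<void> {
    const service = new OpenAIService();
    const { elements } = await service.analyzeScreen(base64Png);
    for (const el of elements) {
        console.log(el.type, el.identifier, el.bounds); // e.g. 'button', 'startBtn', { x, y, width, height }
    }
}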


@@ -1,87 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PlayerService = void 0;
const electron_1 = require("electron");
const openai_service_1 = require("../services/openai.service");
class PlayerService {
constructor() {
this.currentScreenshot = '';
this.isPlaying = false;
console.log('[PlayerService] Initializing');
this.openAIService = new openai_service_1.OpenAIService();
}
async executeBasicCode(code) {
console.log('[PlayerService] executeBasicCode called with:', code);
this.isPlaying = true;
const lines = code.split('\n');
try {
for (const line of lines) {
if (!this.isPlaying)
break;
if (line.trim().startsWith('REM') || line.trim() === '')
continue;
const match = line.match(/^\d+\s+(\w+)\s+"([^"]+)"(?:\s+"([^"]+)")?/);
if (!match)
continue;
const [_, command, identifier, value] = match;
console.log('[PlayerService] Executing command:', { command, identifier, value });
await this.captureAndAnalyzeScreen();
await this.executeCommand(command, identifier, value);
await new Promise(resolve => setTimeout(resolve, 500));
}
}
catch (error) {
console.error('[PlayerService] Execution error:', error);
this.isPlaying = false;
throw error;
}
}
async captureAndAnalyzeScreen() {
console.log('[PlayerService] captureAndAnalyzeScreen called');
const sources = await electron_1.ipcRenderer.invoke('get-screenshot');
this.currentScreenshot = sources[0].thumbnail;
}
async executeCommand(command, identifier, value) {
console.log('[PlayerService] executeCommand called with:', { command, identifier, value });
const element = await this.openAIService.analyzeScreenWithContext({
screenshot: this.currentScreenshot,
transcription: '',
identifier, cursorPosition: null
});
//@ts-nocheck
if (!element) {
console.warn(`[PlayerService] Element not found: ${identifier}, retrying with fresh analysis`);
await this.captureAndAnalyzeScreen();
const newElement = await this.openAIService.analyzeScreenWithContext({
screenshot: this.currentScreenshot,
transcription: '',
cursorPosition: await electron_1.ipcRenderer.invoke('get-cursor-position'),
identifier
});
if (!newElement)
throw new Error(`Element not found after retry: ${identifier}`);
}
const centerX = element.bounds.x + element.bounds.width / 2;
const centerY = element.bounds.y + element.bounds.height / 2;
switch (command) {
case 'CLICK':
console.log('[PlayerService] Simulating click at:', { centerX, centerY });
await electron_1.ipcRenderer.invoke('simulate-click', { x: centerX, y: centerY });
break;
case 'TYPE':
console.log('[PlayerService] Simulating type:', { centerX, centerY, value });
await electron_1.ipcRenderer.invoke('simulate-click', { x: centerX, y: centerY });
await electron_1.ipcRenderer.invoke('simulate-type', { text: value || '' });
break;
case 'MOVE':
console.log('[PlayerService] Simulating move:', { centerX, centerY });
await electron_1.ipcRenderer.invoke('simulate-move', { x: centerX, y: centerY });
break;
}
}
stop() {
console.log('[PlayerService] Stopping playback');
this.isPlaying = false;
}
}
exports.PlayerService = PlayerService;
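The line grammar executeBasicCode accepts is easiest to see against a concrete script. The regex below is copied from the loop above; the sample script is illustrative, but it matches what RecorderService.generateBasicCode emits:

const LINE_RE = /^\d+\s+(\w+)\s+"([^"]+)"(?:\s+"([^"]+)")?/;
const script = [
    '10 REM Log into the portal', // comment line: no quoted argument, skipped
    '20 CLICK "loginButton"',     // CLICK <identifier>
    '30 TYPE "username" "alice"', // TYPE <identifier> <value>
    '40 MOVE "submitButton"',     // MOVE <identifier>
    '50 END',                     // END has no quoted argument, also skipped
].join('\n');

for (const line of script.split('\n')) {
    const m = line.match(LINE_RE);
    if (m) console.log({ command: m[1], identifier: m[2], value: m[3] });
}

Each matched command is then resolved to screen coordinates via analyzeScreenWithContext before the corresponding simulate-* IPC call.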


@@ -1,242 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.RecorderService = void 0;
const electron_1 = require("electron");
const openai_service_1 = require("../services/openai.service");
const path = __importStar(require("path"));
const fs = __importStar(require("fs"));
class RecorderService {
constructor() {
this.eventGroups = [];
this.currentEvents = [];
this.recording = false;
this.currentScreenshot = '';
this.audioBuffer = [];
this.isListeningToMicrophone = false;
this.silenceTimer = null;
this.isProcessingAudio = false;
this.SILENCE_THRESHOLD = 0.01;
this.SILENCE_DURATION = 1500; // 1.5 seconds of silence to trigger processing
this.MIN_AUDIO_DURATION = 500; // Minimum audio duration to process
this.handleAudioLevel = (_, level) => {
if (!this.recording || !this.isListeningToMicrophone)
return;
if (level < this.SILENCE_THRESHOLD) {
if (!this.silenceTimer && !this.isProcessingAudio && this.audioBuffer.length > 0) {
this.silenceTimer = setTimeout(async () => {
if (this.recording) {
await this.processCapturedAudio();
}
}, this.SILENCE_DURATION);
}
}
else {
if (this.silenceTimer) {
clearTimeout(this.silenceTimer);
this.silenceTimer = null;
}
}
};
this.handleAudioChunk = (_, chunk) => {
if (!this.recording || !this.isListeningToMicrophone)
return;
this.audioBuffer.push(chunk);
};
this.handleKeyboardEvent = async (_, event) => {
if (!this.recording)
return;
this.currentEvents.push({
type: 'type',
identifier: event.key,
value: event.key,
timestamp: Date.now(),
narration: ''
});
};
this.handleMouseEvent = async (_, event) => {
if (!this.recording)
return;
const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot);
const element = this.findElementAtPosition(analysis, event.clientX, event.clientY);
if (element) {
this.currentEvents.push({
type: 'click',
identifier: element.identifier,
timestamp: Date.now(),
narration: ''
});
}
};
console.log('RecorderService.constructor()');
this.openAIService = new openai_service_1.OpenAIService();
this.tempDir = path.join(process.cwd(), 'temp_recordings');
this.ensureTempDirectory();
}
ensureTempDirectory() {
if (!fs.existsSync(this.tempDir)) {
fs.mkdirSync(this.tempDir, { recursive: true });
}
}
async startRecording() {
console.log('RecorderService.startRecording()');
try {
this.recording = true;
this.eventGroups = [];
this.currentEvents = [];
await this.startMicrophoneCapture();
await this.captureInitialScreenshot();
this.setupEventListeners();
}
catch (error) {
console.error('RecorderService.startRecording() error:', error);
this.recording = false;
throw error;
}
}
getMicrophoneStream() {
if (typeof window !== 'undefined') {
//@ts-ignore
return window.getMicrophoneStream();
}
return null;
}
async startMicrophoneCapture() {
console.log('RecorderService.startMicrophoneCapture()');
try {
this.isListeningToMicrophone = true;
electron_1.ipcRenderer.on('audio-level', this.handleAudioLevel);
electron_1.ipcRenderer.on('audio-chunk', this.handleAudioChunk);
const stream = this.getMicrophoneStream();
console.log('Got Stream');
}
catch (error) {
console.error('Failed to start microphone capture:', error);
throw new Error(`Microphone initialization failed: ${error.message}`);
}
}
async processCapturedAudio() {
if (this.isProcessingAudio || this.audioBuffer.length === 0)
return;
this.isProcessingAudio = true;
const combinedBuffer = Buffer.concat(this.audioBuffer);
this.audioBuffer = []; // Clear the buffer
try {
const audioFilePath = path.join(this.tempDir, `audio-${Date.now()}.wav`);
fs.writeFileSync(audioFilePath, combinedBuffer);
const transcription = await this.openAIService.transcribeAudio(new Blob([combinedBuffer], { type: 'audio/wav' }));
if (transcription.text.trim()) {
await this.processNarrationWithEvents(transcription.text);
}
fs.unlinkSync(audioFilePath);
}
catch (error) {
console.error('Audio processing error:', error);
}
finally {
this.isProcessingAudio = false;
}
}
async processNarrationWithEvents(narration) {
if (this.currentEvents.length === 0)
return;
const eventGroup = {
narration,
events: [...this.currentEvents],
screenshot: this.currentScreenshot,
timestamp: Date.now()
};
this.eventGroups.push(eventGroup);
this.currentEvents = []; // Clear current events for next group
await this.captureInitialScreenshot(); // Get fresh screenshot for next group
}
setupEventListeners() {
electron_1.ipcRenderer.on('keyboard-event', this.handleKeyboardEvent);
electron_1.ipcRenderer.on('mouse-event', this.handleMouseEvent);
}
async captureInitialScreenshot() {
const sources = await electron_1.ipcRenderer.invoke('get-screenshot');
this.currentScreenshot = sources[0].thumbnail;
}
findElementAtPosition(analysis, x, y) {
return analysis.elements.find(element => {
const bounds = element.bounds;
return x >= bounds.x &&
x <= bounds.x + bounds.width &&
y >= bounds.y &&
y <= bounds.y + bounds.height;
});
}
async stopRecording() {
console.log('RecorderService.stopRecording()');
// Process any remaining audio
if (this.audioBuffer.length > 0) {
await this.processCapturedAudio();
}
this.cleanup();
return this.generateBasicCode();
}
cleanup() {
this.recording = false;
this.isListeningToMicrophone = false;
if (this.silenceTimer) {
clearTimeout(this.silenceTimer);
this.silenceTimer = null;
}
electron_1.ipcRenderer.removeListener('audio-level', this.handleAudioLevel);
electron_1.ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk);
electron_1.ipcRenderer.removeListener('keyboard-event', this.handleKeyboardEvent);
electron_1.ipcRenderer.removeListener('mouse-event', this.handleMouseEvent);
// Cleanup temp directory
fs.readdirSync(this.tempDir).forEach(file => {
fs.unlinkSync(path.join(this.tempDir, file));
});
}
generateBasicCode() {
let basicCode = '10 REM BotDesktop Automation Script\n';
let lineNumber = 20;
this.eventGroups.forEach(group => {
basicCode += `${lineNumber} REM ${group.narration}\n`;
lineNumber += 10;
group.events.forEach(event => {
switch (event.type) {
case 'click':
basicCode += `${lineNumber} CLICK "${event.identifier}"\n`;
break;
case 'type':
basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`;
break;
case 'move':
basicCode += `${lineNumber} MOVE "${event.identifier}"\n`;
break;
}
lineNumber += 10;
});
});
basicCode += `${lineNumber} END\n`;
return basicCode;
}
}
exports.RecorderService = RecorderService;
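The narration grouping above hinges on the silence gate in handleAudioLevel: a level below SILENCE_THRESHOLD that persists for SILENCE_DURATION flushes the buffered audio to transcription, while any louder sample cancels the pending flush. Reduced to its core (constants copied from the class; the callback is illustrative):

const SILENCE_THRESHOLD = 0.01; // normalized 0-1 audio level
const SILENCE_DURATION = 1500;  // ms of sustained silence before processing
let silenceTimer: ReturnType<typeof setTimeout> | null = null;

function onAudioLevel(level: number, processAudio: () => void): void {
    if (level < SILENCE_THRESHOLD) {
        if (!silenceTimer) silenceTimer = setTimeout(processAudio, SILENCE_DURATION);
    } else if (silenceTimer) {
        clearTimeout(silenceTimer); // speech resumed; cancel the pending flush
        silenceTimer = null;
    }
}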


@@ -1,2 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });


@@ -1 +0,0 @@
// Tests for services


@@ -2,8 +2,8 @@ import React, { useState } from 'react';
import { RecorderService } from '../services/recorder.service';
import { PlayerService } from '../services/player.service';
const recorder = new RecorderService();
const player = new PlayerService();
const recorder = new RecorderService(window);
const player = new PlayerService(window);
const App: React.FC = () => {
const [recording, setRecording] = useState(false);


@@ -1,9 +1,9 @@
//@ts-nocheck
require('dotenv').config();
require('electron-require');
import { app } from 'electron';
import { BrowserWindow, desktopCapturer, ipcMain } from 'electron';
import { app, BrowserWindow, desktopCapturer, ipcMain, systemPreferences } from 'electron';
import * as path from 'path';
import { systemPreferences } from 'electron';
import { RecorderService } from '../services/recorder.service';
import { PlayerService } from '../services/player.service';
@@ -20,11 +20,19 @@ const audioCapture: AudioCapture = {
audioStream: null,
analyserNode: null,
audioData: null,
isCapturing: false
isCapturing: false,
};
const recorder = new RecorderService();
const player = new PlayerService();
let recorder: RecorderService;
let player: PlayerService;
function setup() {
// Perform any necessary setup here
const envSetup = require('dotenv').config();
if (envSetup.error) {
throw envSetup.error;
}
}
function createWindow() {
const mainWindow = new BrowserWindow({
@@ -34,201 +42,102 @@ function createWindow() {
center: true,
maximizable: false,
thickFrame: true,
autoHideMenuBar:true,
autoHideMenuBar: true,
webPreferences: {
experimentalFeatures: true,
nodeIntegrationInWorker: true,
nodeIntegration: true,
nodeIntegrationInSubFrames: true,
contextIsolation: false,
preload: path.join(__dirname, '../preload/preload.js')
}
nodeIntegration: false,
contextIsolation: true,
preload: path.join(__dirname, '../preload/preload.js'),
},
});
ipcMain.handle('request-microphone', async () => {
try {
const stream = await mainWindow.webContents.executeJavaScript(`
(async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
return stream;
} catch (error) {
console.error('Error accessing microphone:', error);
throw error;
}
})();
`);
return stream; // Return the stream to the UserService
} catch (error) {
console.error('Failed to get microphone stream:', error);
throw error;
}
});
mainWindow.setAutoHideMenuBar(true);
mainWindow. setMaximizable(false);
if (process.env.NODE_ENV === 'development') {
mainWindow.loadURL('http://localhost:8080');
mainWindow.webContents.openDevTools();
} else {
mainWindow.loadFile(path.join(__dirname, '../../src/renderer/index.html'));
}
ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder));
ipcMain.handle('request-microphone', async () => {
try {
const stream = await mainWindow.webContents.executeJavaScript(`
(async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
return stream;
} catch (error) {
console.error('Error accessing microphone:', error);
throw error;
}
})();
`);
return stream; // Return the stream to the UserService
} catch (error) {
console.error('Failed to get microphone stream:', error);
throw error;
}
});
}
recorder = new RecorderService(mainWindow);
player = new PlayerService(mainWindow);
ipcMain.handle('mouse-event', recorder.handleMouseEvent.bind(recorder));
ipcMain.handle('keyboard-event', recorder.handleKeyboardEvent.bind(recorder));
// Handler to capture the entire screen
ipcMain.handle('get-screenshot', async () => {
console.log('get-screenshot called');
const sources = await desktopCapturer.getSources({ types: ['screen'] });
const screenSource = sources[0]; // Get the first screen source
const { thumbnail } = screenSource; // Thumbnail is a native image
return thumbnail.toPNG(); // Return the screenshot as PNG buffer
});
ipcMain.handle('start-recording', async () => {
console.log('start-recording called');
await recorder.startRecording();
});
ipcMain.handle('stop-recording', async () => {
console.log('stop-recording called');
return await recorder.stopRecording();
});
ipcMain.handle('execute-basic-code', async (_, code: string) => {
console.log('execute-basic-code called with:', code);
await player.executeBasicCode(code);
});
ipcMain.handle('check-microphone-permission', async () => {
console.log('check-microphone-permission called');
if (process.platform === 'darwin') {
const status = await systemPreferences.getMediaAccessStatus('microphone');
if (status !== 'granted') {
const success = await systemPreferences.askForMediaAccess('microphone');
return success;
}
return true;
}
return true; // On Windows/Linux, permissions are handled by the OS
});
ipcMain.handle('start-microphone-capture', async (event) => {
const window = BrowserWindow.fromWebContents(event.sender);
if (!window) {
throw new Error('No window found for this request');
}
return startMicrophoneCapture(window);
});
ipcMain.handle('stop-microphone-capture', async (event) => {
const window = BrowserWindow.fromWebContents(event.sender);
if (!window) {
throw new Error('No window found for this request');
}
return stopMicrophoneCapture(window);
});
ipcMain.handle('start-microphone-capture', async (event, ...args) => {
// Perform asynchronous microphone capture logic here
try {
const result = await startMicrophoneCapture(args[0]); // Assuming this function is async
return result;
} catch (error) {
console.error("Error during microphone capture:", error);
throw error; // Send the error back to the renderer
}
});
ipcMain.handle('stop-microphone-capture', async (event, ...args) => {
try {
const result = await stopMicrophoneCapture(args[0]);
return result;
} catch (error) {
console.error("Error stopping microphone capture:", error);
throw error; // Send the error back to the renderer
}
});
}
app.whenReady().then(createWindow);
app.on('window-all-closed', () => {
if (process.platform !== 'darwin') {
app.quit();
}
});
app.on('activate', () => {
if (BrowserWindow.getAllWindows().length === 0) {
createWindow();
}
});
// Enable required permissions
app.commandLine.appendSwitch('enable-speech-dispatcher');
setupIPC();
// Register cleanup on app quit
app.on('will-quit', cleanupAudioCapture);
function setupIPC() {
ipcMain.handle('start-recording', startRecording);
ipcMain.handle('stop-recording', stopRecording);
ipcMain.handle('execute-basic-code', executeBasicCode);
ipcMain.handle('check-microphone-permission', checkMicrophonePermission);
ipcMain.handle('start-microphone-capture', (event) => handleMicrophoneCapture(event, true));
ipcMain.handle('stop-microphone-capture', (event) => handleMicrophoneCapture(event, false));
// Function to get the focused window or first available window
function getFocusedWindow(): BrowserWindow | null {
const focusedWindow = BrowserWindow.getFocusedWindow();
if (focusedWindow) return focusedWindow;
const windows = BrowserWindow.getAllWindows();
return windows.length > 0 ? windows[0] : null;
ipcMain.handle('get-screenshot', (event) => captureScreenshot(event));
}
// Function to safely send to window
function sendToWindow(channel: string, ...args: any[]) {
const window = getFocusedWindow();
if (window && !window.isDestroyed()) {
window.webContents.send(channel, ...args);
}
async function startRecording() {
console.log('start-recording called');
await recorder.startRecording();
}
async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
async function stopRecording() {
console.log('stop-recording called');
return await recorder.stopRecording();
}
async function executeBasicCode(_, code: string) {
console.log('execute-basic-code called with:', code);
await player.executeBasicCode(code);
}
async function checkMicrophonePermission() {
console.log('check-microphone-permission called');
if (process.platform === 'darwin') {
const status = await systemPreferences.getMediaAccessStatus('microphone');
if (status !== 'granted') {
return await systemPreferences.askForMediaAccess('microphone');
}
return true;
}
return true; // On Windows/Linux, permissions are handled by the OS
}
async function handleMicrophoneCapture(event: Electron.IpcMainEvent, isStart: boolean) {
const window = BrowserWindow.fromWebContents(event.sender);
if (!window) {
throw new Error('No window found for this request');
}
return isStart ? startMicrophoneCapture(window) : stopMicrophoneCapture(window);
}
async function captureScreenshot(event) {
console.log('handle screen');
const sources = await desktopCapturer.getSources({ types: ['screen'] });
window.document.getElementById('screenshot-image').src = sources[0].thumbnail.toDataURL();
}
async function startMicrophoneCapture(window: any): Promise<void> {
console.log('Starting microphone capture...');
try {
navigator.mediaDevices;
// Request microphone access
//@ts-ignore
const stream = await window.myApi.startMicrophone()
const stream = await mainWindow.webContents.executeJavaScript(`
(async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
return stream;
} catch (error) {
console.error('Error accessing microphone:', error);
throw error;
}
})();
`);
audioCapture.audioStream = stream;
// Set up audio analysis
@@ -242,13 +151,11 @@ async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
// Set up MediaRecorder
audioCapture.mediaRecorder = new MediaRecorder(stream, {
mimeType: 'audio/webm;codecs=opus'
mimeType: 'audio/webm;codecs=opus',
});
// Handle audio data
audioCapture.mediaRecorder.ondataavailable = (event: BlobEvent) => {
if (event.data.size > 0 && !window.isDestroyed()) {
// Convert blob to buffer and send to renderer
const reader = new FileReader();
reader.onloadend = () => {
const buffer = Buffer.from(reader.result as ArrayBuffer);
@@ -258,13 +165,11 @@ async function startMicrophoneCapture(window: BrowserWindow): Promise<void> {
}
};
// Start recording
audioCapture.mediaRecorder.start(1000); // Capture in 1-second chunks
audioCapture.isCapturing = true;
// Start audio level monitoring
monitorAudioLevels(window);
console.log('Microphone capture started successfully');
} catch (error) {
console.error('Failed to start microphone capture:', error);
@@ -277,26 +182,18 @@ function monitorAudioLevels(window: BrowserWindow) {
return;
}
// Get audio level data
audioCapture.analyserNode.getByteFrequencyData(audioCapture.audioData);
const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) / audioCapture.audioData.length / 255;
// Calculate average volume level (0-1)
const average = audioCapture.audioData.reduce((acc, value) => acc + value, 0) /
audioCapture.audioData.length /
255;
// Send level to renderer
if (!window.isDestroyed()) {
window.webContents.send('audio-level', average);
}
// Continue monitoring
requestAnimationFrame(() => monitorAudioLevels(window));
}
function stopMicrophoneCapture(window: BrowserWindow) {
console.log('Stopping microphone capture...');
try {
if (audioCapture.mediaRecorder && audioCapture.mediaRecorder.state !== 'inactive') {
audioCapture.mediaRecorder.stop();
@@ -327,21 +224,40 @@ function stopMicrophoneCapture(window: BrowserWindow) {
}
}
// Error handler for audio processing
function handleAudioError(error: Error, window: BrowserWindow): void {
console.error('Audio processing error:', error);
stopMicrophoneCapture(window);
// Notify renderer of error if window still exists
if (!window.isDestroyed()) {
window.webContents.send('audio-error', error.message);
}
}
// Clean up resources when app is closing
export function cleanupAudioCapture(): void {
// Cleanup when app quits
function cleanupAudioCapture(): void {
const window = getFocusedWindow();
if (window) {
stopMicrophoneCapture(window);
}
}
function getFocusedWindow(): BrowserWindow | null {
const focusedWindow = BrowserWindow.getFocusedWindow();
if (focusedWindow) return focusedWindow;
const windows = BrowserWindow.getAllWindows();
return windows.length > 0 ? windows[0] : null;
}
// Setup the environment before creating the window
setup();
app.whenReady().then(createWindow);
app.on('window-all-closed', () => {
if (process.platform !== 'darwin') {
app.quit();
}
});
app.on('activate', () => {
if (BrowserWindow.getAllWindows().length === 0) {
createWindow();
}
});
// Enable required permissions
app.commandLine.appendSwitch('enable-speech-dispatcher');
// Register cleanup on app quit
app.on('will-quit', cleanupAudioCapture);
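After this refactor, setupIPC is the single registry for the main process's renderer-facing surface. For reference, a hedged sketch of that surface as a type map — the channel names are collected from the handlers above; the alias itself is illustrative:

type MainIpcChannels = {
    'start-recording': () => Promise<void>;
    'stop-recording': () => Promise<string>; // resolves to the generated BASIC code
    'execute-basic-code': (code: string) => Promise<void>;
    'check-microphone-permission': () => Promise<boolean>;
    'start-microphone-capture': () => Promise<void>;
    'stop-microphone-capture': () => Promise<void>;
    'get-screenshot': () => Promise<void>; // as written, captureScreenshot updates the DOM rather than returning data
};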


@@ -1,26 +1,16 @@
//@ts-nocheck
const { ipcRenderer } = require('electron');
const { contextBridge } = require('electron');
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
// Initialize IPC listeners for microphone access
ipcRenderer.on('request-microphone', async () => {
if (navigator.mediaDevices) {
return navigator.mediaDevices.getUserMedia({ audio: true });
} else {
console.error("MediaDevices API not supported");
}
// Send the microphone stream back to the renderer
//event.sender.send('microphone-stream', stream);
});
//@ts-nocheck
(window as any).myApi = {
contextBridge.exposeInMainWorld('myAPI', {
send: (channel, data) => ipcRenderer.send(channel, data),
on: (channel, func) => {
ipcRenderer.on(channel, (event, ...args) => func(...args));
},
startMicrophone: () => {
alert(1);
alert(2);
},
sendMessage: (message: any) => {
console.log('[preload] sendMessage called with:', message);
@@ -29,6 +19,5 @@ ipcRenderer.on('request-microphone', async () => {
receiveMessage: (callback: any) => {
console.log('[preload] receiveMessage registered with callback');
return ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
},
};
}
});
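With contextIsolation now enabled, the renderer reaches Electron only through the bridge above. A sketch of the consuming side — the cast stands in for a proper global type declaration, and the channel names come from this commit ('audio-level' is emitted by the main process, 'message-from-renderer' by sendMessage):

const api = (window as any).myAPI; // exposed via contextBridge.exposeInMainWorld above
api.on('audio-level', (level: number) => console.log('mic level:', level));
api.send('message-from-renderer', 'hello from the renderer');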


@@ -6,51 +6,137 @@
<title>General Bots Desktop</title>
<script>var global = global || window;</script>
<script src="https://cdn.tailwindcss.com"></script>
</head>
<body>
<script>
document.addEventListener('DOMContentLoaded', () => {
const stream = navigator.mediaDevices.getUserMedia({
audio: true,
video: false
}).then(stream => {
alert(1);
// Now you have access to the stream
window.microphone = stream;
// You can store it in a global variable
window.getMicrophoneStream = () => stream;
<script defer>
// Or expose it through a global function
window.stopMicrophone = () => {
stream.getTracks().forEach(track => track.stop());
window.microphone = null;
};
}).catch(error => {
console.error('Error accessing microphone:', error);
});
window.addEventListener('load', async() => {
await new Promise(resolve => setTimeout(resolve, 1000));
console.log('HTML loaded.');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
startBtn.addEventListener('click', async () => {
try {
await navigator.mediaDevices.getUserMedia({
audio: true,
video: false
}).then(stream => {
window.microphone = stream;
console.log('Microphone started');
});
} catch (error) {
console.error('Failed to start microphone:', error);
// Microphone.
navigator.mediaDevices.getUserMedia({
audio: true,
video: false
}).then(stream => {
// Now you have access to the stream
window.microphone = stream;
// Store in a global variable
window.getMicrophoneStream = () => stream;
// Expose it through a global function
window.stopMicrophone = () => {
stream.getTracks().forEach(track => track.stop());
window.microphone = null;
};
}).catch(error => {
console.error('Error accessing microphone:', error);
});
startBtn.addEventListener('click', async () => {
try {
await navigator.mediaDevices.getUserMedia({
audio: true,
video: false
}).then(stream => {
window.microphone = stream;
console.log('Microphone started');
});
} catch (error) {
console.error('Failed to start microphone:', error);
}
});
// Screenshot
function selectSource(source) {
navigator.mediaDevices.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id
}
}
})
.then((stream) => {
window.screenStream = stream;
const video = document.getElementById('preview');
video.srcObject = stream;
})
.catch((error) => {
console.error('Error selecting source:', error);
});
}
});
});
function stopCapture() {
if (window.screenStream) {
window.screenStream.getTracks().forEach(track => track.stop());
window.screenStream = null;
const video = document.getElementById('preview');
video.srcObject = null;
document.getElementById('stopBtn').disabled = true;
document.getElementById('screenshotBtn').disabled = true;
}
}
function takeScreenshot() {
const stream = this.getStream();
if (!stream) {
throw new Error('No active screen capture');
}
const video = document.createElement('video');
video.srcObject = stream;
return new Promise((resolve, reject) => {
video.onloadedmetadata = () => {
video.play();
video.pause();
const canvas = document.createElement('canvas');
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
const context = canvas.getContext('2d');
if (!context) {
reject(new Error('Failed to get canvas context'));
return;
}
context.drawImage(video, 0, 0, canvas.width, canvas.height);
canvas.toBlob((blob) => {
if (blob) {
resolve(blob);
} else {
reject(new Error('Failed to convert canvas to blob'));
}
video.srcObject = null;
}, 'image/png');
};
video.onerror = () => {
reject(new Error('Failed to load video'));
};
});
}
}); // End of DOMContentLoaded listener
</script>
<div id="root"></div>
</body>
</html>
</html>


@@ -15,8 +15,10 @@ export class PlayerService {
private openAIService: OpenAIService;
private currentScreenshot: string = '';
private isPlaying: boolean = false;
window: any;
constructor() {
constructor(window: any) {
this.window = window;
console.log('[PlayerService] Initializing');
this.openAIService = new OpenAIService();
}


@@ -1,4 +1,4 @@
import { ipcRenderer } from 'electron';
import { desktopCapturer, ipcMain, ipcRenderer } from 'electron';
import { AutomationEvent, EventGroup, ScreenAnalysis, WhisperResponse } from '../services/types';
import { OpenAIService } from '../services/openai.service';
import * as path from 'path';
@@ -18,8 +18,10 @@ export class RecorderService {
private SILENCE_THRESHOLD = 0.01;
private SILENCE_DURATION = 1500; // 1.5 seconds of silence to trigger processing
private MIN_AUDIO_DURATION = 500; // Minimum audio duration to process
window: any;
constructor() {
constructor(window: any) {
this.window = window;
console.log('RecorderService.constructor()');
this.openAIService = new OpenAIService();
this.tempDir = path.join(process.cwd(), 'temp_recordings');
@@ -39,7 +41,9 @@ export class RecorderService {
this.eventGroups = [];
this.currentEvents = [];
await this.startMicrophoneCapture();
await this.captureInitialScreenshot();
//@ts-ignore
const screen = await ipcRenderer.invoke('get-screenshot');
console.log(screen);
this.setupEventListeners();
} catch (error) {
console.error('RecorderService.startRecording() error:', error);
@@ -48,22 +52,22 @@ export class RecorderService {
}
}
getMicrophoneStream(): MediaStream | null {
getStream(): MediaStream | null {
if (typeof window !== 'undefined') {
//@ts-ignore
return window.getMicrophoneStream();
return window.screenStream;
}
return null;
}
}
private async startMicrophoneCapture() {
console.log('RecorderService.startMicrophoneCapture()');
try {
this.isListeningToMicrophone = true;
ipcRenderer.on('audio-level', this.handleAudioLevel);
ipcRenderer.on('audio-chunk', this.handleAudioChunk);
const stream = this.getMicrophoneStream();
await ipcRenderer.on('audio-level', this.handleAudioLevel);
await ipcRenderer.on('audio-chunk', this.handleAudioChunk);
await ipcRenderer.invoke('start-microphone-capture');
console.log('Got Stream');
} catch (error) {
console.error('Failed to start microphone capture:', error);
@@ -72,6 +76,7 @@ export class RecorderService {
}
public handleAudioLevel = (_: any, level: number) => {
console.log('handleAudioLevel');
if (!this.recording || !this.isListeningToMicrophone) return;
if (level < this.SILENCE_THRESHOLD) {
@@ -91,6 +96,7 @@
}
public handleAudioChunk = (_: any, chunk: Buffer) => {
console.log('handleAudioChunk');
if (!this.recording || !this.isListeningToMicrophone) return;
this.audioBuffer.push(chunk);
}
@@ -134,7 +140,8 @@ export class RecorderService {
this.eventGroups.push(eventGroup);
this.currentEvents = []; // Clear current events for next group
await this.captureInitialScreenshot(); // Get fresh screenshot for next group
//@ts-ignore
await window.getSreenshot(); // Get fresh screenshot for next group
}
private setupEventListeners() {
@@ -142,10 +149,6 @@
ipcRenderer.on('mouse-event', this.handleMouseEvent);
}
private async captureInitialScreenshot() {
const sources = await ipcRenderer.invoke('get-screenshot');
this.currentScreenshot = sources[0].thumbnail;
}
public handleKeyboardEvent = async (_: any, event: KeyboardEvent) => {
if (!this.recording) return;

6 src/types/global.d.ts vendored Normal file

@@ -0,0 +1,6 @@
// types/global.d.ts
declare global {
interface Window {
screenStream: MediaStream | null;
}
}
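One caveat, stated as an assumption: declare global only takes effect when the file is treated as a module, so builds usually add an export {} line to a declaration file like this one. With the augmentation active, the stream that index.html stashes on window reads without a cast:

const active: MediaStream | null = window.screenStream; // typed by the declaration above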


@@ -12,6 +12,7 @@
"rootDir": "src",
"moduleResolution": "node",
"resolveJsonModule": true,
"baseUrl": ".",
"paths": {
"@/*": ["src/*"]