new(all): Initial import.

me@rodrigorodriguez.com 2024-10-26 21:21:51 -03:00
parent 5ebde5b646
commit c6f0933bfd
17 changed files with 542 additions and 66 deletions

cimp.sh (new executable file, 337 lines)

@@ -0,0 +1,337 @@
#!/bin/bash
# Create project directories
mkdir -p ./src/preload
mkdir -p ./src/renderer
mkdir -p ./src/services
# Create preload.ts file
cat <<EOL > ./src/preload/preload.ts
// @ts-nocheck
// File: ./src/preload/preload.ts
const { ipcRenderer } = require('electron');
(window as any).myApi = {
sendMessage: (message: any) => {
console.log('preload.sendMessage', { message });
ipcRenderer.send('message-from-renderer', message);
},
receiveMessage: (callback: any) => {
console.log('preload.receiveMessage', { callback });
ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
},
};
EOL
# Create index.tsx file
cat <<EOL > ./src/renderer/index.tsx
// File: ./src/renderer/index.tsx
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from '../components/App';
ReactDOM.createRoot(
document.getElementById('root') as HTMLElement
).render(
<React.StrictMode>
<App />
</React.StrictMode>
);
EOL
# Create player.service.ts file
cat <<EOL > ./src/services/player.service.ts
// File: ./src/services/player.service.ts
import { ipcMain } from 'electron';
import { AutomationEvent, ScreenAnalysis } from './types';
import { OpenAIService } from './openai.service';
export class PlayerService {
private openAIService: OpenAIService;
constructor() {
console.log('PlayerService.constructor', {});
this.openAIService = new OpenAIService();
}
async executeBasicCode(code: string) {
console.log('PlayerService.executeBasicCode', { code });
const lines = code.split('\\n');
for (const line of lines) {
if (line.trim().startsWith('REM') || line.trim() === '') continue;
const match = line.match(/^\\d+\\s+(\\w+)\\s+"([^"]+)"(?:\\s+"([^"]+)")?/);
if (!match) continue;
const [_, command, identifier, value] = match;
await this.executeCommand(command, identifier, value);
await new Promise(resolve => setTimeout(resolve, 500));
}
}
private async executeCommand(command: string, identifier: string, value?: string) {
console.log('PlayerService.executeCommand', { command, identifier, value });
const screenshotPath = await this.captureScreen();
const analysis = await this.openAIService.analyzeScreen(screenshotPath);
const element = analysis.elements.find(e => e.identifier === identifier);
if (!element) throw new Error(\`Element not found: \${identifier}\`);
const centerX = element.bounds.x + element.bounds.width / 2;
const centerY = element.bounds.y + element.bounds.height / 2;
switch (command) {
case 'CLICK':
await this.simulateClick(centerX, centerY);
break;
case 'TYPE':
await this.simulateClick(centerX, centerY);
await this.simulateTyping(value || '');
break;
}
}
private async captureScreen(): Promise<string> {
console.log('PlayerService.captureScreen', {});
return new Promise((resolve, reject) => {
ipcMain.once('screen-captured', (_, screenshotPath) => {
resolve(screenshotPath);
});
ipcMain.emit('capture-screen');
});
}
private async simulateClick(x: number, y: number): Promise<void> {
console.log('PlayerService.simulateClick', { x, y });
return new Promise((resolve) => {
ipcMain.once('click-completed', () => {
resolve();
});
ipcMain.emit('simulate-click', { x, y });
});
}
private async simulateTyping(text: string): Promise<void> {
console.log('PlayerService.simulateTyping', { text });
return new Promise((resolve) => {
ipcMain.once('typing-completed', () => {
resolve();
});
ipcMain.emit('simulate-typing', { text });
});
}
}
EOL
# Create types.ts file
cat <<EOL > ./src/services/types.ts
// File: ./src/services/types.ts
export interface AutomationAction {
type: 'click' | 'type' | 'move';
identifier: string;
value?: string;
confidence: number;
bounds: {
x: number;
y: number;
width: number;
height: number;
};
}
export interface AutomationEvent {
type: 'click' | 'type' | 'move';
identifier: string;
value?: string;
timestamp: number;
narration: string;
}
export interface WhisperResponse {
text: string;
segments: any;
}
export interface ScreenContext {
screenshot: string;
transcription: string;
cursorPosition: { x: number, y: number };
}
export interface ScreenAnalysis {
timestamp: number,
elements: {
identifier: string;
type: string;
bounds: { x: number; y: number; width: number; height: number };
value?: string;
}[];
}
EOL
# Create recorder.service.ts file
cat <<EOL > ./src/services/recorder.service.ts
// File: ./src/services/recorder.service.ts
const { ipcRenderer } = require('electron'); // Require ipcRenderer
import { AutomationEvent, ScreenAnalysis, WhisperResponse } from '../services/types';
import { OpenAIService } from '../services/openai.service';
import * as path from 'path';
import * as fs from 'fs';
export class RecorderService {
private events: AutomationEvent[] = [];
private recording: boolean = false;
private openAIService: OpenAIService;
private currentScreenshot: string = '';
private lastTranscription: string = '';
private recordingProcess: any = null;
private tempDir: string;
private currentAudioFile: string = '';
private silenceTimer: NodeJS.Timeout | null = null;
private isProcessingAudio: boolean = false;
constructor() {
console.log('RecorderService.constructor', {});
this.openAIService = new OpenAIService();
this.tempDir = path.join(process.cwd(), 'temp_recordings');
if (!fs.existsSync(this.tempDir)) {
fs.mkdirSync(this.tempDir, { recursive: true });
}
}
public async startRecording() {
console.log('RecorderService.startRecording', {});
try {
this.recording = true;
this.events = [];
await this.setupAudioRecording();
await this.requestScreenshot();
ipcRenderer.on('keyboard-event', this.keyboardHandleEvent);
} catch (error) {
console.error('Failed to start recording:', error);
this.recording = false;
throw error;
}
}
private async setupAudioRecording() {
console.log('RecorderService.setupAudioRecording', {});
try {
ipcRenderer.on('audio-level', this.handleAudioLevel);
ipcRenderer.on('audio-chunk', this.handleAudioChunk);
} catch (error) {
console.error('Error setting up audio recording:', error);
throw new Error(\`Failed to setup audio recording: \${error.message}\`);
}
}
private handleAudioLevel = async (_: any, level: number) => {
console.log('RecorderService.handleAudioLevel', { level });
if (!this.recording) return;
const SILENCE_THRESHOLD = 0.01;
const SILENCE_DURATION = 1000;
if (level < SILENCE_THRESHOLD) {
if (!this.silenceTimer && !this.isProcessingAudio) {
this.silenceTimer = setTimeout(async () => {
if (this.recording) {
await this.processSilence();
}
}, SILENCE_DURATION);
}
} else {
if (this.silenceTimer) {
clearTimeout(this.silenceTimer);
this.silenceTimer = null;
}
}
}
private handleAudioChunk = async (_: any, chunk: Buffer) => {
console.log('RecorderService.handleAudioChunk', { chunk });
if (!this.recording) return;
try {
const audioFilePath = path.join(this.tempDir, \`audio-\${Date.now()}.wav\`);
fs.writeFileSync(audioFilePath, chunk);
if (this.silenceTimer) {
clearTimeout(this.silenceTimer);
this.silenceTimer = null;
await this.processAudioFile(audioFilePath);
}
} catch (error) {
console.error('Error handling audio chunk:', error);
}
};
private async processSilence() {
console.log('RecorderService.processSilence', {});
if (this.isProcessingAudio) return;
this.isProcessingAudio = true;
try {
const audioFilePath = await ipcRenderer.invoke('save-audio-chunk');
if (audioFilePath) {
this.currentAudioFile = audioFilePath;
await this.processAudioFile(audioFilePath);
await this.requestScreenshot();
}
} catch (error) {
console.error('Error processing silence:', error);
} finally {
this.isProcessingAudio = false;
}
}
private async processAudioFile(audioFilePath: string) {
console.log('RecorderService.processAudioFile', { audioFilePath });
const transcription = await this.openAIService.transcribeAudio(audioFilePath);
this.lastTranscription = transcription;
await this.requestScreenshot();
}
private async requestScreenshot() {
console.log('RecorderService.requestScreenshot', {});
await ipcRenderer.invoke('request-screenshot');
}
private keyboardHandleEvent = async (_: any, event: any) => {
console.log('RecorderService.keyboardHandleEvent', { event });
if (!this.recording) return;
const automationEvent: AutomationEvent = {
type: 'type',
identifier: event.key,
timestamp: Date.now(),
narration: this.lastTranscription,
};
this.events.push(automationEvent);
};
public async stopRecording() {
console.log('RecorderService.stopRecording', {});
try {
this.recording = false;
ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent);
await ipcRenderer.invoke('stop-audio-recording');
} catch (error) {
console.error('Failed to stop recording:', error);
}
}
}
EOL
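
The scripts that PlayerService.executeBasicCode consumes (and that RecorderService.generateBasicCode later emits) are line-numbered, BASIC-style command lists; a hypothetical example, with made-up identifiers and values:

10 REM BotDesktop Automation Script
20 CLICK "loginButton"
30 TYPE "usernameField" "alice"
40 END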

dist/main/main.js (vendored, 20 lines changed)

@ -24,9 +24,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
require('dotenv').config(); require('dotenv').config();
require('electron-require');
const electron_1 = require("electron"); const electron_1 = require("electron");
const path = __importStar(require("path")); const path = __importStar(require("path"));
// In main.ts
const electron_2 = require("electron"); const electron_2 = require("electron");
const recorder_service_1 = require("../services/recorder.service"); const recorder_service_1 = require("../services/recorder.service");
const player_service_1 = require("../services/player.service"); const player_service_1 = require("../services/player.service");
@ -37,7 +37,9 @@ function createWindow() {
width: 1200, width: 1200,
height: 800, height: 800,
webPreferences: { webPreferences: {
nodeIntegrationInWorker: true,
nodeIntegration: true, nodeIntegration: true,
nodeIntegrationInSubFrames: true,
contextIsolation: false, contextIsolation: false,
preload: path.join(__dirname, '../preload/preload.js') preload: path.join(__dirname, '../preload/preload.js')
} }
@ -64,17 +66,28 @@ electron_1.app.on('activate', () => {
electron_1.ipcMain.handle('mouse-event', recorder.mouseHandleEvent.bind(recorder)); electron_1.ipcMain.handle('mouse-event', recorder.mouseHandleEvent.bind(recorder));
electron_1.ipcMain.handle('keyboard-event', recorder.keyboardHandleEvent.bind(recorder)); electron_1.ipcMain.handle('keyboard-event', recorder.keyboardHandleEvent.bind(recorder));
electron_1.ipcMain.handle('screenshot-captured', recorder.screenshotHandleEvent.bind(recorder)); electron_1.ipcMain.handle('screenshot-captured', recorder.screenshotHandleEvent.bind(recorder));
// Handler to capture the entire screen
electron_1.ipcMain.handle('get-screenshot', async () => {
console.log('get-screenshot called');
const sources = await electron_1.desktopCapturer.getSources({ types: ['screen'] });
const screenSource = sources[0]; // Get the first screen source
const { thumbnail } = screenSource; // Thumbnail is a native image
return thumbnail.toPNG(); // Return the screenshot as PNG buffer
});
electron_1.ipcMain.handle('start-recording', async () => { electron_1.ipcMain.handle('start-recording', async () => {
console.log('start-recording called');
await recorder.startRecording(); await recorder.startRecording();
}); });
electron_1.ipcMain.handle('stop-recording', async () => { electron_1.ipcMain.handle('stop-recording', async () => {
console.log('stop-recording called');
return await recorder.stopRecording(); return await recorder.stopRecording();
}); });
electron_1.ipcMain.handle('execute-basic-code', async (_, code) => { electron_1.ipcMain.handle('execute-basic-code', async (_, code) => {
console.log('execute-basic-code called with:', code);
await player.executeBasicCode(code); await player.executeBasicCode(code);
}); });
// Add microphone permission check for macOS
electron_1.ipcMain.handle('check-microphone-permission', async () => { electron_1.ipcMain.handle('check-microphone-permission', async () => {
console.log('check-microphone-permission called');
if (process.platform === 'darwin') { if (process.platform === 'darwin') {
const status = await electron_2.systemPreferences.getMediaAccessStatus('microphone'); const status = await electron_2.systemPreferences.getMediaAccessStatus('microphone');
if (status !== 'granted') { if (status !== 'granted') {
@ -83,8 +96,7 @@ electron_1.ipcMain.handle('check-microphone-permission', async () => {
} }
return true; return true;
} }
// On Windows/Linux, permissions are handled by the OS return true; // On Windows/Linux, permissions are handled by the OS
return true;
}); });
// Enable required permissions // Enable required permissions
electron_1.app.commandLine.appendSwitch('enable-speech-dispatcher'); electron_1.app.commandLine.appendSwitch('enable-speech-dispatcher');

@ -0,0 +1,12 @@
const { ipcRenderer } = require('electron');
//@ts-nocheck
window.myApi = {
sendMessage: (message) => {
console.log('[preload] sendMessage called with:', message);
return ipcRenderer.send('message-from-renderer', message);
},
receiveMessage: (callback) => {
console.log('[preload] receiveMessage registered with callback');
return ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
},
};

@@ -6,5 +6,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
const react_1 = __importDefault(require("react"));
const client_1 = __importDefault(require("react-dom/client"));
const App_1 = __importDefault(require("../components/App"));
+console.log('[renderer] Initializing React app');
client_1.default.createRoot(document.getElementById('root')).render(react_1.default.createElement(react_1.default.StrictMode, null,
react_1.default.createElement(App_1.default, null)));

@ -5,9 +5,11 @@ const electron_1 = require("electron");
const openai_service_1 = require("./openai.service"); const openai_service_1 = require("./openai.service");
class PlayerService { class PlayerService {
constructor() { constructor() {
console.log('[PlayerService] Initializing');
this.openAIService = new openai_service_1.OpenAIService(); this.openAIService = new openai_service_1.OpenAIService();
} }
async executeBasicCode(code) { async executeBasicCode(code) {
console.log('[PlayerService] executeBasicCode called with:', code);
const lines = code.split('\n'); const lines = code.split('\n');
for (const line of lines) { for (const line of lines) {
if (line.trim().startsWith('REM') || line.trim() === '') if (line.trim().startsWith('REM') || line.trim() === '')
@ -16,49 +18,58 @@ class PlayerService {
if (!match) if (!match)
continue; continue;
const [_, command, identifier, value] = match; const [_, command, identifier, value] = match;
console.log('[PlayerService] Executing command:', { command, identifier, value });
await this.executeCommand(command, identifier, value); await this.executeCommand(command, identifier, value);
await new Promise(resolve => setTimeout(resolve, 500)); await new Promise(resolve => setTimeout(resolve, 500));
} }
} }
async executeCommand(command, identifier, value) { async executeCommand(command, identifier, value) {
// Capture current screen console.log('[PlayerService] executeCommand called with:', { command, identifier, value });
const screenshotPath = await this.captureScreen(); const screenshotPath = await this.captureScreen();
console.log('[PlayerService] Screen captured at:', screenshotPath);
const analysis = await this.openAIService.analyzeScreen(screenshotPath); const analysis = await this.openAIService.analyzeScreen(screenshotPath);
const element = analysis.elements.find(e => e.identifier === identifier); const element = analysis.elements.find(e => e.identifier === identifier);
if (!element) if (!element)
throw new Error(`Element not found: ${identifier}`); throw new Error(`Element not found: ${identifier}`);
// Calculate center point of element
const centerX = element.bounds.x + element.bounds.width / 2; const centerX = element.bounds.x + element.bounds.width / 2;
const centerY = element.bounds.y + element.bounds.height / 2; const centerY = element.bounds.y + element.bounds.height / 2;
switch (command) { switch (command) {
case 'CLICK': case 'CLICK':
console.log('[PlayerService] Simulating click at:', { centerX, centerY });
await this.simulateClick(centerX, centerY); await this.simulateClick(centerX, centerY);
break; break;
case 'TYPE': case 'TYPE':
console.log('[PlayerService] Simulating type:', { centerX, centerY, value });
await this.simulateClick(centerX, centerY); await this.simulateClick(centerX, centerY);
await this.simulateTyping(value || ''); await this.simulateTyping(value || '');
break; break;
} }
} }
async captureScreen() { async captureScreen() {
console.log('[PlayerService] captureScreen called');
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
electron_1.ipcMain.once('screen-captured', (_, screenshotPath) => { electron_1.ipcMain.once('screen-captured', (_, screenshotPath) => {
console.log('[PlayerService] Screen captured event received:', screenshotPath);
resolve(screenshotPath); resolve(screenshotPath);
}); });
electron_1.ipcMain.emit('capture-screen'); electron_1.ipcMain.emit('capture-screen');
}); });
} }
async simulateClick(x, y) { async simulateClick(x, y) {
console.log('[PlayerService] simulateClick called with:', { x, y });
return new Promise((resolve) => { return new Promise((resolve) => {
electron_1.ipcMain.once('click-completed', () => { electron_1.ipcMain.once('click-completed', () => {
console.log('[PlayerService] Click completed');
resolve(); resolve();
}); });
electron_1.ipcMain.emit('simulate-click', { x, y }); electron_1.ipcMain.emit('simulate-click', { x, y });
}); });
} }
async simulateTyping(text) { async simulateTyping(text) {
console.log('[PlayerService] simulateTyping called with:', text);
return new Promise((resolve) => { return new Promise((resolve) => {
electron_1.ipcMain.once('typing-completed', () => { electron_1.ipcMain.once('typing-completed', () => {
console.log('[PlayerService] Typing completed');
resolve(); resolve();
}); });
electron_1.ipcMain.emit('simulate-typing', { text }); electron_1.ipcMain.emit('simulate-typing', { text });

@ -26,7 +26,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.RecorderService = void 0; exports.RecorderService = void 0;
const electron_1 = require("electron"); const electron_1 = require("electron");
const openai_service_1 = require("../services/openai.service"); const openai_service_1 = require("../services/openai.service");
const _ = require('lodash');
const path = __importStar(require("path")); const path = __importStar(require("path"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
class RecorderService { class RecorderService {
@ -35,17 +34,18 @@ class RecorderService {
this.recording = false; this.recording = false;
this.currentScreenshot = ''; this.currentScreenshot = '';
this.lastTranscription = ''; this.lastTranscription = '';
this.recordingProcess = null;
this.currentAudioFile = ''; this.currentAudioFile = '';
this.silenceTimer = null; this.silenceTimer = null;
this.isProcessingAudio = false; this.isProcessingAudio = false;
this.handleAudioLevel = _.debounce(async (_, level) => { this.handleAudioLevel = async (_, level) => {
console.log('RecorderService.handleAudioLevel()', { level });
if (!this.recording) if (!this.recording)
return; return;
const SILENCE_THRESHOLD = 0.01; const SILENCE_THRESHOLD = 0.01;
const SILENCE_DURATION = 1000; const SILENCE_DURATION = 1000;
if (level < SILENCE_THRESHOLD) { if (level < SILENCE_THRESHOLD) {
if (!this.silenceTimer && !this.isProcessingAudio) { if (!this.silenceTimer && !this.isProcessingAudio) {
console.log('RecorderService.handleAudioLevel() - Setting silence timer');
this.silenceTimer = setTimeout(async () => { this.silenceTimer = setTimeout(async () => {
if (this.recording) { if (this.recording) {
await this.processSilence(); await this.processSilence();
@ -55,12 +55,14 @@ class RecorderService {
} }
else { else {
if (this.silenceTimer) { if (this.silenceTimer) {
console.log('RecorderService.handleAudioLevel() - Clearing silence timer');
clearTimeout(this.silenceTimer); clearTimeout(this.silenceTimer);
this.silenceTimer = null; this.silenceTimer = null;
} }
} }
}, 100); };
this.handleAudioChunk = async (_, chunk) => { this.handleAudioChunk = async (_, chunk) => {
console.log('RecorderService.handleAudioChunk()', { chunkSize: chunk.length });
if (!this.recording) if (!this.recording)
return; return;
try { try {
@ -73,9 +75,10 @@ class RecorderService {
} }
} }
catch (error) { catch (error) {
console.error('Error handling audio chunk:', error); console.error('RecorderService.handleAudioChunk() error:', error);
} }
}; };
console.log('RecorderService.constructor()');
this.openAIService = new openai_service_1.OpenAIService(); this.openAIService = new openai_service_1.OpenAIService();
this.tempDir = path.join(process.cwd(), 'temp_recordings'); this.tempDir = path.join(process.cwd(), 'temp_recordings');
if (!fs.existsSync(this.tempDir)) { if (!fs.existsSync(this.tempDir)) {
@ -83,36 +86,39 @@ class RecorderService {
} }
} }
async startRecording() { async startRecording() {
console.log('RecorderService.startRecording()');
try { try {
this.recording = true; this.recording = true;
this.events = []; this.events = [];
await this.setupAudioRecording(); await this.setupAudioRecording();
await this.requestScreenshot(); await this.requestScreenshot();
electron_1.ipcRenderer.on('keyboard-event', this.keyboardHandleEvent); // Listen for keyboard events electron_1.ipcRenderer.on('keyboard-event', this.keyboardHandleEvent);
} }
catch (error) { catch (error) {
console.error('Failed to start recording:', error); console.error('RecorderService.startRecording() error:', error);
this.recording = false; this.recording = false;
throw error; throw error;
} }
} }
async setupAudioRecording() { async setupAudioRecording() {
console.log('RecorderService.setupAudioRecording()');
try { try {
this.recordingProcess = await electron_1.ipcRenderer.invoke('start-audio-recording');
electron_1.ipcRenderer.on('audio-level', this.handleAudioLevel); electron_1.ipcRenderer.on('audio-level', this.handleAudioLevel);
electron_1.ipcRenderer.on('audio-chunk', this.handleAudioChunk); electron_1.ipcRenderer.on('audio-chunk', this.handleAudioChunk);
} }
catch (error) { catch (error) {
console.error('Error setting up audio recording:', error); console.error('RecorderService.setupAudioRecording() error:', error);
throw new Error(`Failed to setup audio recording: ${error.message}`); throw new Error(`Failed to setup audio recording: ${error.message}`);
} }
} }
async processSilence() { async processSilence() {
console.log('RecorderService.processSilence()');
if (this.isProcessingAudio) if (this.isProcessingAudio)
return; return;
this.isProcessingAudio = true; this.isProcessingAudio = true;
try { try {
const audioFilePath = await electron_1.ipcRenderer.invoke('save-audio-chunk'); const audioFilePath = await electron_1.ipcRenderer.invoke('save-audio-chunk');
console.log('RecorderService.processSilence() - Audio saved to:', audioFilePath);
if (audioFilePath) { if (audioFilePath) {
this.currentAudioFile = audioFilePath; this.currentAudioFile = audioFilePath;
await this.processAudioFile(audioFilePath); await this.processAudioFile(audioFilePath);
@ -120,32 +126,38 @@ class RecorderService {
} }
} }
catch (error) { catch (error) {
console.error('Error processing silence:', error); console.error('RecorderService.processSilence() error:', error);
} }
finally { finally {
this.isProcessingAudio = false; this.isProcessingAudio = false;
} }
} }
async processAudioFile(audioFilePath) { async processAudioFile(audioFilePath) {
console.log('RecorderService.processAudioFile()', { audioFilePath });
try { try {
const audioBuffer = fs.readFileSync(audioFilePath); const audioBuffer = fs.readFileSync(audioFilePath);
const transcription = await this.openAIService.transcribeAudio(new Blob([audioBuffer], { type: 'audio/wav' })); const transcription = await this.openAIService.transcribeAudio(new Blob([audioBuffer], { type: 'audio/wav' }));
console.log('RecorderService.processAudioFile() - Transcription:', transcription);
if (transcription.text.trim()) { if (transcription.text.trim()) {
await this.processTranscription(transcription); await this.processTranscription(transcription);
} }
fs.unlinkSync(audioFilePath); fs.unlinkSync(audioFilePath);
} }
catch (error) { catch (error) {
console.error('Error processing audio file:', error); console.error('RecorderService.processAudioFile() error:', error);
} }
} }
async processTranscription(transcription) { async processTranscription(transcription) {
console.log('RecorderService.processTranscription()', { transcription });
this.lastTranscription = transcription.text; this.lastTranscription = transcription.text;
const cursorPosition = await electron_1.ipcRenderer.invoke('get-cursor-position');
console.log('RecorderService.processTranscription() - Cursor position:', cursorPosition);
const analysis = await this.openAIService.analyzeScreenWithContext({ const analysis = await this.openAIService.analyzeScreenWithContext({
screenshot: this.currentScreenshot, screenshot: this.currentScreenshot,
transcription: this.lastTranscription, transcription: this.lastTranscription,
cursorPosition: await electron_1.ipcRenderer.invoke('get-cursor-position') cursorPosition
}); });
console.log('RecorderService.processTranscription() - Screen analysis:', analysis);
if (analysis) { if (analysis) {
this.events.push({ this.events.push({
type: analysis.type, type: analysis.type,
@ -157,34 +169,40 @@ class RecorderService {
} }
} }
async stopRecording() { async stopRecording() {
console.log('RecorderService.stopRecording()');
this.recording = false; this.recording = false;
if (this.silenceTimer) { if (this.silenceTimer) {
clearTimeout(this.silenceTimer); clearTimeout(this.silenceTimer);
this.silenceTimer = null; this.silenceTimer = null;
} }
await electron_1.ipcRenderer.invoke('stop-audio-recording');
electron_1.ipcRenderer.removeListener('audio-level', this.handleAudioLevel); electron_1.ipcRenderer.removeListener('audio-level', this.handleAudioLevel);
electron_1.ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk); electron_1.ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk);
electron_1.ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent); // Remove keyboard listener electron_1.ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent);
if (this.currentAudioFile && fs.existsSync(this.currentAudioFile)) { if (this.currentAudioFile && fs.existsSync(this.currentAudioFile)) {
fs.unlinkSync(this.currentAudioFile); fs.unlinkSync(this.currentAudioFile);
} }
return this.generateBasicCode(); const code = this.generateBasicCode();
console.log('RecorderService.stopRecording() - Generated code:', code);
return code;
} }
async requestScreenshot() { async requestScreenshot() {
console.log('RecorderService.requestScreenshot()');
try { try {
const sources = await electron_1.ipcRenderer.invoke('get-screenshot'); const sources = await electron_1.ipcRenderer.invoke('get-screenshot');
console.log('RecorderService.requestScreenshot() - Sources:', sources);
const screenSource = sources[0]; const screenSource = sources[0];
await this.screenshotHandleEvent(null, screenSource.thumbnail); await this.screenshotHandleEvent(null, screenSource.thumbnail);
} }
catch (error) { catch (error) {
console.error('Error capturing screenshot:', error); console.error('RecorderService.requestScreenshot() error:', error);
} }
} }
async screenshotHandleEvent(_, screenshot) { async screenshotHandleEvent(_, screenshot) {
console.log('RecorderService.screenshotHandleEvent()', { screenshot });
this.currentScreenshot = screenshot; this.currentScreenshot = screenshot;
} }
async keyboardHandleEvent(_, event) { async keyboardHandleEvent(_, event) {
console.log('RecorderService.keyboardHandleEvent()', { key: event.key });
if (!this.recording) if (!this.recording)
return; return;
this.events.push({ this.events.push({
@ -195,10 +213,13 @@ class RecorderService {
}); });
} }
async mouseHandleEvent(_, event) { async mouseHandleEvent(_, event) {
console.log('RecorderService.mouseHandleEvent()', { x: event.x, y: event.y });
if (!this.recording) if (!this.recording)
return; return;
const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot); const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot);
console.log('RecorderService.mouseHandleEvent() - Screen analysis:', analysis);
const element = this.findElementAtPosition(analysis, event.x, event.y); const element = this.findElementAtPosition(analysis, event.x, event.y);
console.log('RecorderService.mouseHandleEvent() - Found element:', element);
if (element) { if (element) {
this.events.push({ this.events.push({
type: 'click', type: 'click',
@ -209,16 +230,21 @@ class RecorderService {
} }
} }
findElementAtPosition(analysis, x, y) { findElementAtPosition(analysis, x, y) {
//@ts-nocheck console.log('RecorderService.findElementAtPosition()', { x, y, analysisElementsCount: analysis.elements.length });
return analysis.elements.find((element) => { return analysis.elements.find((element) => {
const bounds = element.bounds; const bounds = element.bounds;
return x >= bounds.x && const found = x >= bounds.x &&
x <= bounds.x + bounds.width && x <= bounds.x + bounds.width &&
y >= bounds.y && y >= bounds.y &&
y <= bounds.y + bounds.height; y <= bounds.y + bounds.height;
if (found) {
console.log('RecorderService.findElementAtPosition() - Found matching element:', element);
}
return found;
}); });
} }
generateBasicCode() { generateBasicCode() {
console.log('RecorderService.generateBasicCode()', { eventsCount: this.events.length });
let basicCode = '10 REM BotDesktop Automation Script\n'; let basicCode = '10 REM BotDesktop Automation Script\n';
let lineNumber = 20; let lineNumber = 20;
for (const event of this.events) { for (const event of this.events) {
@ -228,9 +254,6 @@ class RecorderService {
case 'click': case 'click':
basicCode += `${lineNumber} CLICK "${event.identifier}"\n`; basicCode += `${lineNumber} CLICK "${event.identifier}"\n`;
break; break;
case 'type':
basicCode += `${lineNumber} TYPE "${event.identifier}"\n`;
break;
case 'type': case 'type':
basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`; basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`;
break; break;
@ -241,6 +264,7 @@ class RecorderService {
lineNumber += 10; lineNumber += 10;
} }
basicCode += `${lineNumber} END\n`; basicCode += `${lineNumber} END\n`;
console.log('RecorderService.generateBasicCode() - Generated code:', basicCode);
return basicCode; return basicCode;
} }
} }

package-lock.json (generated, 7 lines changed)

@@ -14,6 +14,7 @@
"debounce": "^2.2.0",
"dotenv": "^16.4.5",
"electron": "^28.0.0",
+"electron-require": "^0.3.0",
"lodash": "^4.17.21",
"node-global-key-listener": "^0.3.0",
"node-mouse": "^0.0.2",
@@ -3508,6 +3509,12 @@
"node": ">= 10.0.0"
}
},
+"node_modules/electron-require": {
+"version": "0.3.0",
+"resolved": "https://registry.npmjs.org/electron-require/-/electron-require-0.3.0.tgz",
+"integrity": "sha512-/e3qgt6h2rxVD0I35KLsjbZKBYdJKRA7dyFyehdnVXqo5MVrWF0f0h9j0n5qWpAmr/ahETN33kv6985cHUwivw==",
+"license": "MIT"
+},
"node_modules/electron-to-chromium": {
"version": "1.5.47",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.47.tgz",

@@ -16,7 +16,8 @@
"debounce": "^2.2.0",
"dotenv": "^16.4.5",
"electron": "^28.0.0",
-"lodash": "^4.17.21",
+"electron-require": "^0.3.0",
"node-global-key-listener": "^0.3.0",
"node-mouse": "^0.0.2",
"openai": "^4.28.0",

@@ -1,8 +1,9 @@
require('dotenv').config();
-import { app, BrowserWindow, ipcMain } from 'electron';
+require('electron-require');
+import { app, BrowserWindow, desktopCapturer, ipcMain } from 'electron';
import * as path from 'path';
-// In main.ts
import { systemPreferences } from 'electron';
import { RecorderService } from '../services/recorder.service';
import { PlayerService } from '../services/player.service';
@@ -13,8 +14,11 @@ function createWindow() {
const mainWindow = new BrowserWindow({
width: 1200,
height: 800,
webPreferences: {
+nodeIntegrationInWorker: true,
nodeIntegration: true,
+nodeIntegrationInSubFrames: true,
contextIsolation: false,
preload: path.join(__dirname, '../preload/preload.js')
}
@@ -46,22 +50,33 @@ ipcMain.handle('mouse-event', recorder.mouseHandleEvent.bind(recorder));
ipcMain.handle('keyboard-event', recorder.keyboardHandleEvent.bind(recorder));
ipcMain.handle('screenshot-captured', recorder.screenshotHandleEvent.bind(recorder));
+// Handler to capture the entire screen
+ipcMain.handle('get-screenshot', async () => {
+console.log('get-screenshot called');
+const sources = await desktopCapturer.getSources({ types: ['screen'] });
+const screenSource = sources[0]; // Get the first screen source
+const { thumbnail } = screenSource; // Thumbnail is a native image
+return thumbnail.toPNG(); // Return the screenshot as PNG buffer
+});
ipcMain.handle('start-recording', async () => {
+console.log('start-recording called');
await recorder.startRecording();
});
ipcMain.handle('stop-recording', async () => {
+console.log('stop-recording called');
return await recorder.stopRecording();
});
ipcMain.handle('execute-basic-code', async (_, code: string) => {
+console.log('execute-basic-code called with:', code);
await player.executeBasicCode(code);
});
+// Add microphone permission check for macOS
ipcMain.handle('check-microphone-permission', async () => {
+console.log('check-microphone-permission called');
if (process.platform === 'darwin') {
const status = await systemPreferences.getMediaAccessStatus('microphone');
if (status !== 'granted') {
@@ -70,9 +85,8 @@ ipcMain.handle('check-microphone-permission', async () => {
}
return true;
}
-// On Windows/Linux, permissions are handled by the OS
-return true;
+return true; // On Windows/Linux, permissions are handled by the OS
});
// Enable required permissions
app.commandLine.appendSwitch('enable-speech-dispatcher');
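
Taken together, the handlers registered above expose a small renderer-facing IPC surface: mouse-event, keyboard-event and screenshot-captured feed the recorder; get-screenshot returns the first screen source as a PNG buffer; start-recording and stop-recording begin a session and return the generated BASIC script; execute-basic-code replays a script; and check-microphone-permission reports microphone access (checked via systemPreferences on macOS, always true elsewhere).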

@ -0,0 +1,13 @@
const { ipcRenderer } = require('electron');
//@ts-nocheck
(window as any).myApi = {
sendMessage: (message: any) => {
console.log('[preload] sendMessage called with:', message);
return ipcRenderer.send('message-from-renderer', message);
},
receiveMessage: (callback: any) => {
console.log('[preload] receiveMessage registered with callback');
return ipcRenderer.on('message-from-main', (event, arg) => callback(arg));
},
};
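
Renderer code can then talk to the main process through this bridge; a minimal sketch (the payloads here are made up):

// Hypothetical renderer-side usage of window.myApi
(window as any).myApi.sendMessage({ type: 'ping' });
(window as any).myApi.receiveMessage((reply: any) => {
console.log('message from main:', reply);
});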

@@ -3,6 +3,7 @@
<head>
<meta charset="UTF-8">
<title>BotDesktop</title>
+<script>var global = global || window;</script>
<script src="https://cdn.tailwindcss.com"></script>
</head>
<body>

@@ -1,10 +1,8 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from '../components/App';
-console.log('[renderer] Initializing React app');
ReactDOM.createRoot(
document.getElementById('root') as HTMLElement
).render(

@@ -6,10 +6,12 @@ export class PlayerService {
private openAIService: OpenAIService;
constructor() {
+console.log('[PlayerService] Initializing');
this.openAIService = new OpenAIService();
}
async executeBasicCode(code: string) {
+console.log('[PlayerService] executeBasicCode called with:', code);
const lines = code.split('\n');
for (const line of lines) {
@@ -19,29 +21,33 @@ export class PlayerService {
if (!match) continue;
const [_, command, identifier, value] = match;
+console.log('[PlayerService] Executing command:', { command, identifier, value });
await this.executeCommand(command, identifier, value);
await new Promise(resolve => setTimeout(resolve, 500));
}
}
private async executeCommand(command: string, identifier: string, value?: string) {
-// Capture current screen
+console.log('[PlayerService] executeCommand called with:', { command, identifier, value });
const screenshotPath = await this.captureScreen();
+console.log('[PlayerService] Screen captured at:', screenshotPath);
const analysis = await this.openAIService.analyzeScreen(screenshotPath);
const element = analysis.elements.find(e => e.identifier === identifier);
if (!element) throw new Error(`Element not found: ${identifier}`);
-// Calculate center point of element
const centerX = element.bounds.x + element.bounds.width/2;
const centerY = element.bounds.y + element.bounds.height/2;
switch (command) {
case 'CLICK':
+console.log('[PlayerService] Simulating click at:', { centerX, centerY });
await this.simulateClick(centerX, centerY);
break;
case 'TYPE':
+console.log('[PlayerService] Simulating type:', { centerX, centerY, value });
await this.simulateClick(centerX, centerY);
await this.simulateTyping(value || '');
break;
@@ -49,8 +55,10 @@ export class PlayerService {
}
private async captureScreen(): Promise<string> {
+console.log('[PlayerService] captureScreen called');
return new Promise((resolve, reject) => {
ipcMain.once('screen-captured', (_, screenshotPath) => {
+console.log('[PlayerService] Screen captured event received:', screenshotPath);
resolve(screenshotPath);
});
@@ -59,8 +67,10 @@ export class PlayerService {
}
private async simulateClick(x: number, y: number): Promise<void> {
+console.log('[PlayerService] simulateClick called with:', { x, y });
return new Promise((resolve) => {
ipcMain.once('click-completed', () => {
+console.log('[PlayerService] Click completed');
resolve();
});
@@ -69,12 +79,14 @@ export class PlayerService {
}
private async simulateTyping(text: string): Promise<void> {
+console.log('[PlayerService] simulateTyping called with:', text);
return new Promise((resolve) => {
ipcMain.once('typing-completed', () => {
+console.log('[PlayerService] Typing completed');
resolve();
});
ipcMain.emit('simulate-typing', { text });
});
}
}
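
For reference, executeBasicCode matches each line against /^\d+\s+(\w+)\s+"([^"]+)"(?:\s+"([^"]+)")?/, so a hypothetical line such as 20 TYPE "usernameField" "alice" yields command = TYPE, identifier = usernameField and value = alice; REM lines and blank lines are skipped, and a 500 ms pause follows each executed command.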

@ -1,7 +1,6 @@
import { ipcRenderer } from 'electron'; import { ipcRenderer } from 'electron';
import { AutomationEvent, ScreenAnalysis, WhisperResponse } from '../services/types'; import { AutomationEvent, ScreenAnalysis, WhisperResponse } from '../services/types';
import { OpenAIService } from '../services/openai.service'; import { OpenAIService } from '../services/openai.service';
const _ = require('lodash');
import * as path from 'path'; import * as path from 'path';
import * as fs from 'fs'; import * as fs from 'fs';
@ -11,13 +10,13 @@ export class RecorderService {
private openAIService: OpenAIService; private openAIService: OpenAIService;
private currentScreenshot: string = ''; private currentScreenshot: string = '';
private lastTranscription: string = ''; private lastTranscription: string = '';
private recordingProcess: any = null;
private tempDir: string;
private currentAudioFile: string = ''; private currentAudioFile: string = '';
private silenceTimer: NodeJS.Timeout | null = null; private silenceTimer: NodeJS.Timeout | null = null;
private isProcessingAudio: boolean = false; private isProcessingAudio: boolean = false;
private tempDir: string;
constructor() { constructor() {
console.log('RecorderService.constructor()');
this.openAIService = new OpenAIService(); this.openAIService = new OpenAIService();
this.tempDir = path.join(process.cwd(), 'temp_recordings'); this.tempDir = path.join(process.cwd(), 'temp_recordings');
if (!fs.existsSync(this.tempDir)) { if (!fs.existsSync(this.tempDir)) {
@ -26,31 +25,33 @@ export class RecorderService {
} }
public async startRecording() { public async startRecording() {
console.log('RecorderService.startRecording()');
try { try {
this.recording = true; this.recording = true;
this.events = []; this.events = [];
await this.setupAudioRecording(); await this.setupAudioRecording();
await this.requestScreenshot(); await this.requestScreenshot();
ipcRenderer.on('keyboard-event', this.keyboardHandleEvent); // Listen for keyboard events ipcRenderer.on('keyboard-event', this.keyboardHandleEvent);
} catch (error) { } catch (error) {
console.error('Failed to start recording:', error); console.error('RecorderService.startRecording() error:', error);
this.recording = false; this.recording = false;
throw error; throw error;
} }
} }
private async setupAudioRecording() { private async setupAudioRecording() {
console.log('RecorderService.setupAudioRecording()');
try { try {
this.recordingProcess = await ipcRenderer.invoke('start-audio-recording');
ipcRenderer.on('audio-level', this.handleAudioLevel); ipcRenderer.on('audio-level', this.handleAudioLevel);
ipcRenderer.on('audio-chunk', this.handleAudioChunk); ipcRenderer.on('audio-chunk', this.handleAudioChunk);
} catch (error) { } catch (error) {
console.error('Error setting up audio recording:', error); console.error('RecorderService.setupAudioRecording() error:', error);
throw new Error(`Failed to setup audio recording: ${error.message}`); throw new Error(`Failed to setup audio recording: ${error.message}`);
} }
} }
private handleAudioLevel = _.debounce(async (_: any, level: number) => { private handleAudioLevel = async (_: any, level: number) => {
console.log('RecorderService.handleAudioLevel()', { level });
if (!this.recording) return; if (!this.recording) return;
const SILENCE_THRESHOLD = 0.01; const SILENCE_THRESHOLD = 0.01;
@ -58,6 +59,7 @@ export class RecorderService {
if (level < SILENCE_THRESHOLD) { if (level < SILENCE_THRESHOLD) {
if (!this.silenceTimer && !this.isProcessingAudio) { if (!this.silenceTimer && !this.isProcessingAudio) {
console.log('RecorderService.handleAudioLevel() - Setting silence timer');
this.silenceTimer = setTimeout(async () => { this.silenceTimer = setTimeout(async () => {
if (this.recording) { if (this.recording) {
await this.processSilence(); await this.processSilence();
@ -66,13 +68,15 @@ export class RecorderService {
} }
} else { } else {
if (this.silenceTimer) { if (this.silenceTimer) {
console.log('RecorderService.handleAudioLevel() - Clearing silence timer');
clearTimeout(this.silenceTimer); clearTimeout(this.silenceTimer);
this.silenceTimer = null; this.silenceTimer = null;
} }
} }
}, 100); }
private handleAudioChunk = async (_: any, chunk: Buffer) => { private handleAudioChunk = async (_: any, chunk: Buffer) => {
console.log('RecorderService.handleAudioChunk()', { chunkSize: chunk.length });
if (!this.recording) return; if (!this.recording) return;
try { try {
@ -85,34 +89,38 @@ export class RecorderService {
await this.processAudioFile(audioFilePath); await this.processAudioFile(audioFilePath);
} }
} catch (error) { } catch (error) {
console.error('Error handling audio chunk:', error); console.error('RecorderService.handleAudioChunk() error:', error);
} }
}; };
private async processSilence() { private async processSilence() {
console.log('RecorderService.processSilence()');
if (this.isProcessingAudio) return; if (this.isProcessingAudio) return;
this.isProcessingAudio = true; this.isProcessingAudio = true;
try { try {
const audioFilePath = await ipcRenderer.invoke('save-audio-chunk'); const audioFilePath = await ipcRenderer.invoke('save-audio-chunk');
console.log('RecorderService.processSilence() - Audio saved to:', audioFilePath);
if (audioFilePath) { if (audioFilePath) {
this.currentAudioFile = audioFilePath; this.currentAudioFile = audioFilePath;
await this.processAudioFile(audioFilePath); await this.processAudioFile(audioFilePath);
await this.requestScreenshot(); await this.requestScreenshot();
} }
} catch (error) { } catch (error) {
console.error('Error processing silence:', error); console.error('RecorderService.processSilence() error:', error);
} finally { } finally {
this.isProcessingAudio = false; this.isProcessingAudio = false;
} }
} }
private async processAudioFile(audioFilePath: string) { private async processAudioFile(audioFilePath: string) {
console.log('RecorderService.processAudioFile()', { audioFilePath });
try { try {
const audioBuffer = fs.readFileSync(audioFilePath); const audioBuffer = fs.readFileSync(audioFilePath);
const transcription = await this.openAIService.transcribeAudio( const transcription = await this.openAIService.transcribeAudio(
new Blob([audioBuffer], { type: 'audio/wav' }) new Blob([audioBuffer], { type: 'audio/wav' })
); );
console.log('RecorderService.processAudioFile() - Transcription:', transcription);
if (transcription.text.trim()) { if (transcription.text.trim()) {
await this.processTranscription(transcription); await this.processTranscription(transcription);
@ -120,18 +128,23 @@ export class RecorderService {
fs.unlinkSync(audioFilePath); fs.unlinkSync(audioFilePath);
} catch (error) { } catch (error) {
console.error('Error processing audio file:', error); console.error('RecorderService.processAudioFile() error:', error);
} }
} }
private async processTranscription(transcription: WhisperResponse) { private async processTranscription(transcription: WhisperResponse) {
console.log('RecorderService.processTranscription()', { transcription });
this.lastTranscription = transcription.text; this.lastTranscription = transcription.text;
const cursorPosition = await ipcRenderer.invoke('get-cursor-position');
console.log('RecorderService.processTranscription() - Cursor position:', cursorPosition);
const analysis = await this.openAIService.analyzeScreenWithContext({ const analysis = await this.openAIService.analyzeScreenWithContext({
screenshot: this.currentScreenshot, screenshot: this.currentScreenshot,
transcription: this.lastTranscription, transcription: this.lastTranscription,
cursorPosition: await ipcRenderer.invoke('get-cursor-position') cursorPosition
}); });
console.log('RecorderService.processTranscription() - Screen analysis:', analysis);
if (analysis) { if (analysis) {
this.events.push({ this.events.push({
@ -145,6 +158,7 @@ export class RecorderService {
} }
public async stopRecording(): Promise<string> { public async stopRecording(): Promise<string> {
console.log('RecorderService.stopRecording()');
this.recording = false; this.recording = false;
if (this.silenceTimer) { if (this.silenceTimer) {
@ -152,33 +166,38 @@ export class RecorderService {
this.silenceTimer = null; this.silenceTimer = null;
} }
await ipcRenderer.invoke('stop-audio-recording');
ipcRenderer.removeListener('audio-level', this.handleAudioLevel); ipcRenderer.removeListener('audio-level', this.handleAudioLevel);
ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk); ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk);
ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent); // Remove keyboard listener ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent);
if (this.currentAudioFile && fs.existsSync(this.currentAudioFile)) { if (this.currentAudioFile && fs.existsSync(this.currentAudioFile)) {
fs.unlinkSync(this.currentAudioFile); fs.unlinkSync(this.currentAudioFile);
} }
return this.generateBasicCode(); const code = this.generateBasicCode();
console.log('RecorderService.stopRecording() - Generated code:', code);
return code;
} }
private async requestScreenshot() { private async requestScreenshot() {
console.log('RecorderService.requestScreenshot()');
try { try {
const sources = await ipcRenderer.invoke('get-screenshot'); const sources = await ipcRenderer.invoke('get-screenshot');
console.log('RecorderService.requestScreenshot() - Sources:', sources);
const screenSource = sources[0]; const screenSource = sources[0];
await this.screenshotHandleEvent(null, screenSource.thumbnail); await this.screenshotHandleEvent(null, screenSource.thumbnail);
} catch (error) { } catch (error) {
console.error('Error capturing screenshot:', error); console.error('RecorderService.requestScreenshot() error:', error);
} }
} }
public async screenshotHandleEvent(_: any, screenshot: string) { public async screenshotHandleEvent(_: any, screenshot: string) {
console.log('RecorderService.screenshotHandleEvent()', { screenshot });
this.currentScreenshot = screenshot; this.currentScreenshot = screenshot;
} }
public async keyboardHandleEvent(_: any, event: KeyboardEvent) { public async keyboardHandleEvent(_: any, event: KeyboardEvent) {
console.log('RecorderService.keyboardHandleEvent()', { key: event.key });
if (!this.recording) return; if (!this.recording) return;
this.events.push({ this.events.push({
@ -190,10 +209,14 @@ export class RecorderService {
} }
public async mouseHandleEvent(_: any, event: any) { public async mouseHandleEvent(_: any, event: any) {
console.log('RecorderService.mouseHandleEvent()', { x: event.x, y: event.y });
if (!this.recording) return; if (!this.recording) return;
const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot); const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot);
console.log('RecorderService.mouseHandleEvent() - Screen analysis:', analysis);
const element = this.findElementAtPosition(analysis, event.x, event.y); const element = this.findElementAtPosition(analysis, event.x, event.y);
console.log('RecorderService.mouseHandleEvent() - Found element:', element);
if (element) { if (element) {
this.events.push({ this.events.push({
@ -206,17 +229,22 @@ export class RecorderService {
} }
private findElementAtPosition(analysis: ScreenAnalysis, x: number, y: number) { private findElementAtPosition(analysis: ScreenAnalysis, x: number, y: number) {
//@ts-nocheck console.log('RecorderService.findElementAtPosition()', { x, y, analysisElementsCount: analysis.elements.length });
return analysis.elements.find((element) => { return analysis.elements.find((element) => {
const bounds = element.bounds; const bounds = element.bounds;
return x >= bounds.x && const found = x >= bounds.x &&
x <= bounds.x + bounds.width && x <= bounds.x + bounds.width &&
y >= bounds.y && y >= bounds.y &&
y <= bounds.y + bounds.height; y <= bounds.y + bounds.height;
if (found) {
console.log('RecorderService.findElementAtPosition() - Found matching element:', element);
}
return found;
}); });
} }
private generateBasicCode(): string { private generateBasicCode(): string {
console.log('RecorderService.generateBasicCode()', { eventsCount: this.events.length });
let basicCode = '10 REM BotDesktop Automation Script\n'; let basicCode = '10 REM BotDesktop Automation Script\n';
let lineNumber = 20; let lineNumber = 20;
@ -228,9 +256,6 @@ export class RecorderService {
case 'click': case 'click':
basicCode += `${lineNumber} CLICK "${event.identifier}"\n`; basicCode += `${lineNumber} CLICK "${event.identifier}"\n`;
break; break;
case 'type':
basicCode += `${lineNumber} TYPE "${event.identifier}"\n`;
break;
case 'type': case 'type':
basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`; basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`;
break; break;
@ -242,6 +267,7 @@ export class RecorderService {
} }
basicCode += `${lineNumber} END\n`; basicCode += `${lineNumber} END\n`;
console.log('RecorderService.generateBasicCode() - Generated code:', basicCode);
return basicCode; return basicCode;
} }
} }
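
End to end, a renderer-driven session built on these handlers would look roughly like the following sketch (recordAndReplay is a hypothetical helper; error handling omitted):

// Hypothetical record-and-replay round trip over the IPC handlers registered in main.ts
const { ipcRenderer } = require('electron');
async function recordAndReplay() {
await ipcRenderer.invoke('start-recording');
// ... narrate and perform the actions to automate ...
const script = await ipcRenderer.invoke('stop-recording'); // returns the generated BASIC code
await ipcRenderer.invoke('execute-basic-code', script);
}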

@@ -11,6 +11,8 @@ export interface AutomationAction {
};
}
export interface AutomationEvent {
type: 'click' | 'type' | 'move';
identifier: string;
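
The interfaces in types.ts are shared by the recorder and the player; as illustrative literals (all values made up):

// Hypothetical values for the shapes defined in types.ts
// (assumes: import { AutomationEvent, ScreenAnalysis } from '../services/types')
const exampleEvent: AutomationEvent = {
type: 'click',
identifier: 'loginButton',
timestamp: Date.now(),
narration: 'click the login button',
};
const exampleAnalysis: ScreenAnalysis = {
timestamp: Date.now(),
elements: [
{ identifier: 'loginButton', type: 'button', bounds: { x: 100, y: 200, width: 80, height: 32 } },
],
};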

@@ -1,5 +1,7 @@
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
+// webpack.config.js
+const webpack = require('webpack');
module.exports = {
devtool: 'source-map',
@@ -22,7 +24,10 @@ module.exports = {
path: path.resolve(__dirname, 'dist/renderer'),
},
plugins: [
-new HtmlWebpackPlugin({
+new webpack.ProvidePlugin({
+global: 'global', // This will make global available in your bundled code
+}),
+new HtmlWebpackPlugin({
template: './src/renderer/index.html'
}),
],