"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.RecorderService = void 0;
const electron_1 = require("electron");
const openai_service_1 = require("../services/openai.service");
const path = __importStar(require("path"));
const fs = __importStar(require("fs"));
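/**
 * Records a narrated desktop session from the Electron renderer: it listens to
 * audio-level/audio-chunk/keyboard IPC events, transcribes the narration and
 * analyzes screenshots through OpenAIService, accumulates the resulting events,
 * and turns them into a BASIC-style automation script when recording stops.
 */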
class RecorderService {
    constructor() {
        this.events = [];
        this.recording = false;
        this.currentScreenshot = '';
        this.lastTranscription = '';
        this.currentAudioFile = '';
        this.silenceTimer = null;
        this.isProcessingAudio = false;
        // Arrow function so `this` stays bound when the renderer registers it as an IPC listener.
        this.handleAudioLevel = async (_, level) => {
            console.log('RecorderService.handleAudioLevel()', { level });
            if (!this.recording)
                return;
            // Treat sustained low input as "the narrator stopped talking".
            const SILENCE_THRESHOLD = 0.01;
            const SILENCE_DURATION = 1000; // ms of silence before processing the buffered audio
            if (level < SILENCE_THRESHOLD) {
                if (!this.silenceTimer && !this.isProcessingAudio) {
                    console.log('RecorderService.handleAudioLevel() - Setting silence timer');
                    this.silenceTimer = setTimeout(async () => {
                        if (this.recording) {
                            await this.processSilence();
                        }
                    }, SILENCE_DURATION);
                }
            }
            else {
                if (this.silenceTimer) {
                    console.log('RecorderService.handleAudioLevel() - Clearing silence timer');
                    clearTimeout(this.silenceTimer);
                    this.silenceTimer = null;
                }
            }
        };
        this.handleAudioChunk = async (_, chunk) => {
            console.log('RecorderService.handleAudioChunk()', { chunkSize: chunk.length });
            if (!this.recording)
                return;
            try {
                // Persist the raw chunk to disk; it is only transcribed if a silence timer was pending.
                const audioFilePath = path.join(this.tempDir, `audio-${Date.now()}.wav`);
                fs.writeFileSync(audioFilePath, chunk);
                if (this.silenceTimer) {
                    clearTimeout(this.silenceTimer);
                    this.silenceTimer = null;
                    await this.processAudioFile(audioFilePath);
                }
            }
            catch (error) {
                console.error('RecorderService.handleAudioChunk() error:', error);
            }
        };
        console.log('RecorderService.constructor()');
        // Bind the prototype method that is registered/removed as an IPC listener in
        // startRecording()/stopRecording(), so `this` refers to the service when it fires.
        this.keyboardHandleEvent = this.keyboardHandleEvent.bind(this);
        this.openAIService = new openai_service_1.OpenAIService();
        this.tempDir = path.join(process.cwd(), 'temp_recordings');
        if (!fs.existsSync(this.tempDir)) {
            fs.mkdirSync(this.tempDir, { recursive: true });
        }
    }
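    /**
     * Starts a recording session: clears any previous events, wires the audio IPC
     * listeners, grabs an initial screenshot, and subscribes to 'keyboard-event'.
     * On failure the recording flag is reset and the error is rethrown.
     */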
    async startRecording() {
        console.log('RecorderService.startRecording()');
        try {
            this.recording = true;
            this.events = [];
            await this.setupAudioRecording();
            await this.requestScreenshot();
            electron_1.ipcRenderer.on('keyboard-event', this.keyboardHandleEvent);
        }
        catch (error) {
            console.error('RecorderService.startRecording() error:', error);
            this.recording = false;
            throw error;
        }
    }
    async setupAudioRecording() {
        console.log('RecorderService.setupAudioRecording()');
        try {
            electron_1.ipcRenderer.on('audio-level', this.handleAudioLevel);
            electron_1.ipcRenderer.on('audio-chunk', this.handleAudioChunk);
        }
        catch (error) {
            console.error('RecorderService.setupAudioRecording() error:', error);
            throw new Error(`Failed to setup audio recording: ${error.message}`);
        }
    }
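    /**
     * Runs when the silence timer fires: asks the main process to persist the
     * buffered audio via the 'save-audio-chunk' IPC handler, transcribes and
     * analyzes that file, then refreshes the screenshot for the next segment.
     * Guarded by isProcessingAudio so overlapping silences are not processed twice.
     */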
    async processSilence() {
        console.log('RecorderService.processSilence()');
        if (this.isProcessingAudio)
            return;
        this.isProcessingAudio = true;
        try {
            const audioFilePath = await electron_1.ipcRenderer.invoke('save-audio-chunk');
            console.log('RecorderService.processSilence() - Audio saved to:', audioFilePath);
            if (audioFilePath) {
                this.currentAudioFile = audioFilePath;
                await this.processAudioFile(audioFilePath);
                await this.requestScreenshot();
            }
        }
        catch (error) {
            console.error('RecorderService.processSilence() error:', error);
        }
        finally {
            this.isProcessingAudio = false;
        }
    }
    async processAudioFile(audioFilePath) {
        console.log('RecorderService.processAudioFile()', { audioFilePath });
        try {
            const audioBuffer = fs.readFileSync(audioFilePath);
            const transcription = await this.openAIService.transcribeAudio(new Blob([audioBuffer], { type: 'audio/wav' }));
            console.log('RecorderService.processAudioFile() - Transcription:', transcription);
            if (transcription.text.trim()) {
                await this.processTranscription(transcription);
            }
            fs.unlinkSync(audioFilePath);
        }
        catch (error) {
            console.error('RecorderService.processAudioFile() error:', error);
        }
    }
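    /**
     * Combines the latest transcription with the cached screenshot and the current
     * cursor position, asks OpenAIService to infer the user action, and records it
     * as an event with the narration attached.
     */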
    async processTranscription(transcription) {
        console.log('RecorderService.processTranscription()', { transcription });
        this.lastTranscription = transcription.text;
        const cursorPosition = await electron_1.ipcRenderer.invoke('get-cursor-position');
        console.log('RecorderService.processTranscription() - Cursor position:', cursorPosition);
        const analysis = await this.openAIService.analyzeScreenWithContext({
            screenshot: this.currentScreenshot,
            transcription: this.lastTranscription,
            cursorPosition
        });
        console.log('RecorderService.processTranscription() - Screen analysis:', analysis);
        if (analysis) {
            this.events.push({
                type: analysis.type,
                identifier: analysis.identifier,
                value: analysis.value,
                timestamp: Date.now(),
                narration: this.lastTranscription
            });
        }
    }
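    /**
     * Stops the session: clears the silence timer, detaches all IPC listeners,
     * deletes the last temporary audio file, and returns the generated
     * BASIC-style script for the recorded events.
     */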
    async stopRecording() {
        console.log('RecorderService.stopRecording()');
        this.recording = false;
        if (this.silenceTimer) {
            clearTimeout(this.silenceTimer);
            this.silenceTimer = null;
        }
        electron_1.ipcRenderer.removeListener('audio-level', this.handleAudioLevel);
        electron_1.ipcRenderer.removeListener('audio-chunk', this.handleAudioChunk);
        electron_1.ipcRenderer.removeListener('keyboard-event', this.keyboardHandleEvent);
        if (this.currentAudioFile && fs.existsSync(this.currentAudioFile)) {
            fs.unlinkSync(this.currentAudioFile);
        }
        const code = this.generateBasicCode();
        console.log('RecorderService.stopRecording() - Generated code:', code);
        return code;
    }
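    /**
     * Fetches the available screen sources through the 'get-screenshot' IPC handler
     * and caches the first source's thumbnail as the current screenshot.
     */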
    async requestScreenshot() {
        console.log('RecorderService.requestScreenshot()');
        try {
            const sources = await electron_1.ipcRenderer.invoke('get-screenshot');
            console.log('RecorderService.requestScreenshot() - Sources:', sources);
            const screenSource = sources[0];
            await this.screenshotHandleEvent(null, screenSource.thumbnail);
        }
        catch (error) {
            console.error('RecorderService.requestScreenshot() error:', error);
        }
    }
    async screenshotHandleEvent(_, screenshot) {
        console.log('RecorderService.screenshotHandleEvent()', { screenshot });
        this.currentScreenshot = screenshot;
    }
    async keyboardHandleEvent(_, event) {
        console.log('RecorderService.keyboardHandleEvent()', { key: event.key });
        if (!this.recording)
            return;
        this.events.push({
            type: 'type',
            identifier: event.key,
            timestamp: Date.now(),
            narration: this.lastTranscription
        });
    }
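    /**
     * Resolves a mouse click to a UI element by analyzing the cached screenshot and
     * records it as a 'click' event. Note: this handler is not registered by this
     * module itself; it is presumably wired to mouse events elsewhere.
     */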
    async mouseHandleEvent(_, event) {
        console.log('RecorderService.mouseHandleEvent()', { x: event.x, y: event.y });
        if (!this.recording)
            return;
        const analysis = await this.openAIService.analyzeScreen(this.currentScreenshot);
        console.log('RecorderService.mouseHandleEvent() - Screen analysis:', analysis);
        const element = this.findElementAtPosition(analysis, event.x, event.y);
        console.log('RecorderService.mouseHandleEvent() - Found element:', element);
        if (element) {
            this.events.push({
                type: 'click',
                identifier: element.identifier,
                timestamp: Date.now(),
                narration: this.lastTranscription
            });
        }
    }
    findElementAtPosition(analysis, x, y) {
        console.log('RecorderService.findElementAtPosition()', { x, y, analysisElementsCount: analysis.elements.length });
        return analysis.elements.find((element) => {
            const bounds = element.bounds;
            const found = x >= bounds.x &&
                x <= bounds.x + bounds.width &&
                y >= bounds.y &&
                y <= bounds.y + bounds.height;
            if (found) {
                console.log('RecorderService.findElementAtPosition() - Found matching element:', element);
            }
            return found;
        });
    }
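    /**
     * Serializes the recorded events into a line-numbered BASIC-style script.
     * Each event contributes a REM line carrying its narration, followed by a
     * CLICK/TYPE/MOVE command. For a single recorded click the output looks
     * roughly like:
     *
     *   10 REM BotDesktop Automation Script
     *   20 REM Click the save button
     *   30 CLICK "save-button"
     *   40 END
     *
     * (The narration and identifier above are illustrative placeholders.)
     */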
    generateBasicCode() {
        console.log('RecorderService.generateBasicCode()', { eventsCount: this.events.length });
        let basicCode = '10 REM BotDesktop Automation Script\n';
        let lineNumber = 20;
        for (const event of this.events) {
            basicCode += `${lineNumber} REM ${event.narration}\n`;
            lineNumber += 10;
            switch (event.type) {
                case 'click':
                    basicCode += `${lineNumber} CLICK "${event.identifier}"\n`;
                    break;
                case 'type':
                    basicCode += `${lineNumber} TYPE "${event.identifier}" "${event.value}"\n`;
                    break;
                case 'move':
                    basicCode += `${lineNumber} MOVE "${event.identifier}"\n`;
                    break;
            }
            lineNumber += 10;
        }
        basicCode += `${lineNumber} END\n`;
        console.log('RecorderService.generateBasicCode() - Generated code:', basicCode);
        return basicCode;
    }
}
exports.RecorderService = RecorderService;
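// Usage sketch (illustrative, not part of this module). Assuming the main process
// exposes the 'get-screenshot', 'get-cursor-position', and 'save-audio-chunk' IPC
// handlers this service invokes, and forwards 'audio-level', 'audio-chunk', and
// 'keyboard-event' to the renderer, a caller might drive it roughly like this
// (the require path is hypothetical):
//
//   const { RecorderService } = require('./recorder.service');
//   const recorder = new RecorderService();
//   await recorder.startRecording();               // wires IPC listeners, takes first screenshot
//   // ...user narrates and interacts with the screen...
//   const script = await recorder.stopRecording(); // tears down listeners, returns BASIC script
//   console.log(script);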