refactor(config): remove unused S3 bucket configuration and setup logic
Removes the `s3_bucket` field from `AppConfig` and deletes the related initialization code in `main.rs` (the `ensure_bucket_exists` helper). Storage settings now live under the renamed `drive` configuration, and bucket names are derived per bot from `drive.org_prefix`, so a single global S3 bucket setting is no longer required or used.
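For orientation, a minimal sketch of the configuration shape after this change, assembled from the hunks below. Only `server`, `access_key`, `secret_key`, and `org_prefix` are confirmed `DriveConfig` fields in this diff; `bucket_for` is a hypothetical helper added here purely to illustrate the per-bot naming and does not exist in the codebase.

    // Sketch only: the global bucket setting is gone; storage settings live under
    // AppConfig.drive, and bucket names are derived per bot at runtime.
    struct DriveConfig {
        server: String,
        access_key: String,
        secret_key: String,
        org_prefix: String,
    }

    struct AppConfig {
        drive: DriveConfig, // was `minio`
        // server, database, database_custom, email, ai, site_path, stack_path, db_conn unchanged
    }

    // Hypothetical helper mirroring the naming used in main.rs: "<org_prefix><bot_guid>.gbai"
    fn bucket_for(cfg: &AppConfig, bot_guid: &str) -> String {
        format!("{}{}.gbai", cfg.drive.org_prefix, bot_guid)
    }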
parent 21fdb326a6 · commit 663864cb78
14 changed files with 120 additions and 71 deletions

add-req.sh (13 changes)
@@ -20,25 +20,24 @@ done
 dirs=(
   # "auth"
-  "automation"
+  # "automation"
-  "basic"
+  # "basic"
   # "bot"
   "bootstrap"
-  # "package_manager"
+  "package_manager"
   # "channels"
   # "config"
   # "context"
   # "email"
-  # "file"
+  "file"
   # "llm"
   "drive_monitor"
   # "llm_legacy"
   # "org"
   # "session"
-  "file"
-  "kb"
+  #"kb"
   "shared"
-  "tests"
+  #"tests"
   # "tools"
   # "web_automation"
   # "whatsapp"
@@ -175,7 +175,7 @@ pub async fn get_from_bucket(
         "App configuration missing".into()
     })?;

-    let org_prefix = &cfg.minio.org_prefix;
+    let org_prefix = &cfg.drive.org_prefix;

     if org_prefix.contains("..") || org_prefix.contains('/') || org_prefix.contains('\\') {
         error!("Invalid org_prefix: {}", org_prefix);
@@ -2,11 +2,10 @@ use crate::config::AppConfig;
 use crate::package_manager::{InstallMode, PackageManager};
 use anyhow::Result;
 use diesel::connection::SimpleConnection;
-use diesel::{Connection, QueryableByName};
 use diesel::RunQueryDsl;
+use diesel::{Connection, QueryableByName};
 use dotenvy::dotenv;
-use log::{error, info};
-use opendal::services::S3;
+use log::{error, info, trace};
 use opendal::Operator;
 use rand::distr::Alphanumeric;
 use rand::Rng;
@@ -29,6 +28,7 @@ pub struct ComponentInfo {
 pub struct BootstrapManager {
     pub install_mode: InstallMode,
     pub tenant: Option<String>,
+    pub s3_operator: Operator,
 }

 impl BootstrapManager {
@@ -37,9 +37,12 @@ impl BootstrapManager {
             "Initializing BootstrapManager with mode {:?} and tenant {:?}",
             install_mode, tenant
         );
+        let config = AppConfig::from_env();
+        let s3_operator = Self::create_s3_operator(&config);
         Self {
             install_mode,
             tenant,
+            s3_operator,
         }
     }
@@ -289,9 +292,21 @@ impl BootstrapManager {
             }
         }

+        self.s3_operator = Self::create_s3_operator(&config);
         Ok(config)
     }

+    fn create_s3_operator(config: &AppConfig) -> Operator {
+        use opendal::Scheme;
+        use std::collections::HashMap;
+        let mut map = HashMap::new();
+        map.insert("endpoint".to_string(), config.drive.server.clone());
+        map.insert("access_key_id".to_string(), config.drive.access_key.clone());
+        map.insert("secret_access_key".to_string(), config.drive.secret_key.clone());
+        trace!("Creating S3 operator with endpoint {}", config.drive.server);
+        Operator::via_iter(Scheme::S3, map).expect("Failed to initialize S3 operator")
+    }
+
     fn generate_secure_password(&self, length: usize) -> String {
         let mut rng = rand::rng();
         std::iter::repeat_with(|| rng.sample(Alphanumeric) as char)
@@ -307,7 +322,7 @@ impl BootstrapManager {
     }

     fn update_bot_config(&self, bot_id: &uuid::Uuid, component: &str) -> Result<()> {
-        use diesel::sql_types::{Uuid as SqlUuid, Text};
+        use diesel::sql_types::{Text, Uuid as SqlUuid};
         let database_url = std::env::var("DATABASE_URL")
             .unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
         let mut conn = diesel::pg::PgConnection::establish(&database_url)?;
@@ -321,7 +336,7 @@ impl BootstrapManager {
             "INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type)
              VALUES ($1, $2, $3, $4, 'string')
              ON CONFLICT (config_key)
-             DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()"
+             DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()",
         )
         .bind::<SqlUuid, _>(new_id)
         .bind::<SqlUuid, _>(bot_id)
@@ -332,39 +347,53 @@ impl BootstrapManager {
         Ok(())
     }

-    pub async fn upload_templates_to_minio(&self, config: &AppConfig) -> Result<()> {
+    pub async fn upload_templates_to_drive(&self, config: &AppConfig) -> Result<()> {
         let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| config.database_url());
         let mut conn = diesel::PgConnection::establish(&database_url)?;
         self.create_bots_from_templates(&mut conn)?;

-        let client = Operator::new(
-            S3::default()
-                .root("/")
-                .endpoint(&config.minio.server)
-                .access_key_id(&config.minio.access_key)
-                .secret_access_key(&config.minio.secret_key),
-        )?
-        .finish();
-
         let templates_dir = Path::new("templates");
         if !templates_dir.exists() {
             return Ok(());
         }
+        let operator = &self.s3_operator;
         for entry in std::fs::read_dir(templates_dir)? {
+            let bot_name = templates_dir
+                .read_dir()?
+                .filter_map(|e| e.ok())
+                .find(|e| {
+                    e.path().is_dir()
+                        && e.path()
+                            .file_name()
+                            .unwrap()
+                            .to_string_lossy()
+                            .ends_with(".gbai")
+                })
+                .map(|e| {
+                    let name = e.path().file_name().unwrap().to_string_lossy().to_string();
+                    name
+                })
+                .unwrap_or_else(|| "default".to_string());
             let entry = entry?;
             let path = entry.path();
-            if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) {
+            if path.is_dir()
+                && path
+                    .file_name()
+                    .unwrap()
+                    .to_string_lossy()
+                    .ends_with(".gbai")
+            {
                 let bot_name = path.file_name().unwrap().to_string_lossy().to_string();
+                let bucket = bot_name.clone();
-                self.upload_directory_recursive(&client, &path, &bot_name, "")
+                info!("Uploading template {} to Drive bucket {}", bot_name, bucket);
+                self.upload_directory_recursive(&operator, &path, &bucket, &bot_name)
                     .await?;
+                info!("Uploaded template {} to Drive bucket {}", bot_name, bucket);
             }
         }

         Ok(())
     }

     fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> {
         use crate::shared::models::schema::bots;
         use diesel::prelude::*;
@@ -425,6 +454,16 @@ impl BootstrapManager {
         prefix: &'a str,
     ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> {
         Box::pin(async move {
+            trace!("Checking bucket existence: {}", bucket);
+            if client.stat(bucket).await.is_err() {
+                info!("Bucket {} not found, creating it", bucket);
+                trace!("Creating bucket: {}", bucket);
+                client.create_dir(bucket).await?;
+                trace!("Bucket {} created successfully", bucket);
+            } else {
+                trace!("Bucket {} already exists", bucket);
+            }
+            trace!("Starting upload from local path: {}", local_path.display());
             for entry in std::fs::read_dir(local_path)? {
                 let entry = entry?;
                 let path = entry.path();
@@ -443,7 +482,18 @@ impl BootstrapManager {
                         key
                     );
                     let content = std::fs::read(&path)?;
+                    trace!(
+                        "Writing file {} to bucket {} with key {}",
+                        path.display(),
+                        bucket,
+                        key
+                    );
                     client.write(&key, content).await?;
+                    trace!(
+                        "Successfully wrote file {} to bucket {}",
+                        path.display(),
+                        bucket
+                    );
                 } else if path.is_dir() {
                     self.upload_directory_recursive(client, &path, bucket, &key)
                         .await?;
@@ -8,14 +8,13 @@ use std::sync::{Arc, Mutex};

 #[derive(Clone)]
 pub struct AppConfig {
-    pub minio: DriveConfig,
+    pub drive: DriveConfig,
     pub server: ServerConfig,
     pub database: DatabaseConfig,
     pub database_custom: DatabaseConfig,
     pub email: EmailConfig,
     pub ai: AIConfig,
     pub site_path: String,
-    pub s3_bucket: String,
     pub stack_path: PathBuf,
     pub db_conn: Option<Arc<Mutex<PgConnection>>>,
 }
@@ -218,7 +217,7 @@ impl AppConfig {
         };

         AppConfig {
-            minio,
+            drive: minio,
             server: ServerConfig {
                 host: get_str("SERVER_HOST", "127.0.0.1"),
                 port: get_u16("SERVER_PORT", 8080),
@@ -227,7 +226,6 @@ impl AppConfig {
             database_custom,
             email,
             ai,
-            s3_bucket: get_str("DRIVE_BUCKET", "default"),
             site_path: get_str("SITES_ROOT", "./botserver-stack/sites"),
             stack_path,
             db_conn: None,
@@ -300,7 +298,7 @@ impl AppConfig {
         };

         AppConfig {
-            minio,
+            drive: minio,
             server: ServerConfig {
                 host: std::env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()),
                 port: std::env::var("SERVER_PORT")
@@ -312,7 +310,6 @@ impl AppConfig {
             database_custom,
             email,
             ai,
-            s3_bucket: std::env::var("DRIVE_BUCKET").unwrap_or_else(|_| "default".to_string()),
             site_path: std::env::var("SITES_ROOT")
                 .unwrap_or_else(|_| "./botserver-stack/sites".to_string()),
             stack_path: PathBuf::from(stack_path),
src/main.rs (30 changes)
@@ -140,7 +140,7 @@ async fn main() -> std::io::Result<()> {

     let _ = bootstrap.start_all();
-    if let Err(e) = bootstrap.upload_templates_to_minio(&cfg).await {
+    if let Err(e) = bootstrap.upload_templates_to_drive(&cfg).await {
         log::warn!("Failed to upload templates to MinIO: {}", e);
     }
@@ -193,32 +193,8 @@ async fn main() -> std::io::Result<()> {
     ));
     let tool_api = Arc::new(tools::ToolApi::new());

-    use opendal::services::S3;
-    use opendal::Operator;
-    use opendal::ErrorKind;
-
-    async fn ensure_bucket_exists(cfg: &AppConfig) {
-        let builder = S3::default()
-            .endpoint(&cfg.minio.server)
-            .access_key_id(&cfg.minio.access_key)
-            .secret_access_key(&cfg.minio.secret_key)
-            .bucket(&cfg.s3_bucket)
-            .root("/");
-        let op = Operator::new(builder).unwrap().finish();
-        match op.stat("/").await {
-            Ok(_) => info!("Bucket {} exists", cfg.s3_bucket),
-            Err(e) if e.kind() == ErrorKind::NotFound => {
-                if let Err(err) = op.create_dir("/").await {
-                    info!("Created bucket {}: {:?}", cfg.s3_bucket, err);
-                }
-            }
-            Err(e) => info!("Bucket check failed: {:?}", e),
-        }
-    }
-
-    ensure_bucket_exists(&config).await;
-
-    let drive = init_drive(&config.minio)
+    let drive = init_drive(&config.drive)
         .await
         .expect("Failed to initialize Drive");
@@ -279,7 +255,7 @@ async fn main() -> std::io::Result<()> {

     let drive_state = app_state.clone();
     let bot_guid = std::env::var("BOT_GUID").unwrap_or_else(|_| "default_bot".to_string());
-    let bucket_name = format!("{}{}.gbai", cfg.minio.org_prefix, bot_guid);
+    let bucket_name = format!("{}{}.gbai", cfg.drive.org_prefix, bot_guid);
     let drive_monitor = Arc::new(DriveMonitor::new(drive_state, bucket_name));
     let _drive_handle = drive_monitor.spawn();
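The bucket-existence check that used to live in `ensure_bucket_exists` now happens lazily inside `BootstrapManager::upload_directory_recursive` (see the hunks above). Below is a condensed sketch of that flow using only the opendal calls that appear in this diff; the function name `ensure_and_write` is hypothetical and exists only for illustration.

    // Hypothetical condensation of the upload path: stat the bucket, create it if
    // missing, then write the object. `client` is the Operator built by
    // BootstrapManager::create_s3_operator.
    async fn ensure_and_write(
        client: &opendal::Operator,
        bucket: &str,
        key: &str,
        content: Vec<u8>,
    ) -> anyhow::Result<()> {
        if client.stat(bucket).await.is_err() {
            client.create_dir(bucket).await?; // bucket/prefix does not exist yet
        }
        client.write(key, content).await?; // upload the file under its key
        Ok(())
    }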
@@ -0,0 +1,4 @@
+name,value
+prompt-compact, 10
+prompt-cache,true
+prompt-fixed-kb,geral
@@ -0,0 +1,9 @@
+PARAM subject as string
+DESCRIPTION "Chamado quando alguém quer mudar o assunto da conversa."
+
+kbname = LLM "Devolva uma única palavra circular, comunicado ou geral de acordo com a seguinte frase:" + subject
+
+ADD_KB kbname
+
+
+TALK "You have chosen to change the subject to " + subject + "."
@@ -1,9 +1,17 @@
-LET resume = GET_BOT_MEMORY("resume")
+LET resume1 = GET_BOT_MEMORY("general")
+LET resume2 = GET_BOT_MEMORY("auxiliom")
+LET resume3 = GET_BOT_MEMORY("toolbix")

-IF resume <> "" THEN
-TALK resume
-END IF
+SET_CONTEXT "general", resume1
+SET_CONTEXT "auxiliom", resume2
+SET_CONTEXT "toolbix", resume3

+ADD_SUGGESTION "general", "Show me the weekly announcements"
+ADD_SUGGESTION "auxiliom", "Will Auxiliom help me with what?"
+ADD_SUGGESTION "auxiliom", "What does Auxiliom do?"
+ADD_SUGGESTION "toolbix", "Show me Toolbix features"
+ADD_SUGGESTION "toolbix", "How can Toolbix help my business?"

-ADD_KB "weekly"

 TALK "You can ask me about any of the announcements or circulars."
@@ -1,5 +1,11 @@

-let text = GET "default.gbdrive/default.pdf"
+let text = GET "announcements.gbkb/news/news.pdf"
 let resume = LLM "Resume this document, in a table (DO NOT THINK) no_think: " + text

 SET_BOT_MEMORY "resume", resume
+
+let text1 = GET "announcements.gbkb/auxiliom/auxiliom.pdf"
+SET_BOT_MEMORY "auxiliom", text1
+
+let text2 = GET "announcements.gbkb/toolbix/toolbix.pdf"
+SET_BOT_MEMORY "toolbix", text2
BIN  templates/announcements.gbai/announcements.gbkb/news/news.pdf (new file; binary content not shown)
(additional binary files changed; contents not shown)
@@ -1,8 +1,8 @@
 name,value

-server_host=0.0.0.0
-server_port=8080
-sites_root=/tmp
+server_host,0.0.0.0
+server_port,8080
+sites_root,/tmp

 llm-key,gsk_
 llm-model,openai/gpt-oss-20b