new(all): Initial import.

Parent: da4cbb1da7
Commit: ced0bc3f0f

20 changed files with 99 additions and 45 deletions

.vscode/launch.json (vendored): 46 changes

@@ -1,12 +1,28 @@
 {
     "version": "0.2.0",
     "configurations": [
+        {
+            "type": "lldb",
+            "request": "launch",
+            "name": "Cargo test",
+            "cargo": {
+                "args": [
+                    "test",
+                    "--no-run",
+                    "--lib"
+                ]
+            },
+            "args": []
+        },
         {
             "type": "lldb",
             "request": "launch",
             "name": "Debug GB API Server",
             "cargo": {
-                "args": ["build", "--bin=gb-api"],
+                "args": [
+                    "build",
+                    "--bin=gb-api"
+                ],
                 "filter": {
                     "name": "gb-api",
                     "kind": "bin"
@@ -23,15 +39,22 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug GB API Tests",
+            "name": "Debug unit tests in executable 'gb-api'",
             "cargo": {
-                "args": ["test", "--package=gb-api", "--lib"],
+                "args": [
+                    "test",
+                    "--no-run",
+                    "--lib",
+                    "--package=gb-api"
+                ],
                 "filter": {
                     "name": "gb-api",
-                    "kind": "lib"
+                    "kind": "bin"
                 }
             },
-            "args": [],
+            "args": [
+                "--test-threads=1"
+            ],
             "cwd": "${workspaceFolder}"
         },
         {
@@ -39,7 +62,12 @@
             "request": "launch",
             "name": "Debug Integration Tests",
             "cargo": {
-                "args": ["test", "--package=gb-api", "--test=integration"],
+                "args": [
+                    "test",
+                    "--no-run",
+                    "--lib",
+                    "--package=gb-api"
+                ],
                 "filter": {
                     "name": "integration",
                     "kind": "test"
@@ -47,12 +75,14 @@
             },
             "args": [],
             "cwd": "${workspaceFolder}"
-        }
+        },
     ],
     "compounds": [
         {
             "name": "API Server + Debug",
-            "configurations": ["Debug GB API Server"]
+            "configurations": [
+                "Debug GB API Server"
+            ]
         }
     ]
 }

.vscode/settings.json (vendored, new file): 6 changes

@@ -0,0 +1,6 @@
+{
+    "lldb.executable": "/usr/bin/lldb",
+    "lldb.showDisassembly": "never",
+    "lldb.dereferencePointers": true,
+    "lldb.consoleMode": "commands"
+}

.vscode/tasks.json (vendored, new file): 15 changes

@@ -0,0 +1,15 @@
+{
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "build",
+            "type": "shell",
+            "command": "cargo",
+            "args": ["build"],
+            "group": {
+                "kind": "build",
+                "isDefault": true
+            }
+        }
+    ]
+}

@@ -1,5 +1,4 @@
 use gb_core::{Error, Result};
-use gb_core::models::Customer;
 use tracing::{info, error};
 use axum::Router;
 use std::net::SocketAddr;
@@ -38,7 +37,7 @@ async fn initialize_bot_server() -> Result<axum::Router> {
 }
 
 fn init_logging() -> Result<()> {
-    use tracing_subscriber::{EnvFilter, fmt};
+    use tracing_subscriber::EnvFilter;
 
     let env_filter = EnvFilter::try_from_default_env()
         .unwrap_or_else(|_| EnvFilter::new("info"));
@@ -70,6 +69,7 @@ async fn initialize_redis() -> Result<redis::Client> {
         .map_err(|e| Error::internal(e.to_string()))
 }
 
+#[allow(dead_code)]
 #[derive(Clone)]
 struct AppState {
     db: sqlx::PgPool,

@@ -4,9 +4,12 @@ use rdkafka::config::ClientConfig;
 use std::time::Duration;
 use serde::Serialize;
 
+#[allow(dead_code)]
 pub struct KafkaBroker {
     producer: FutureProducer,
+    // Stored for reconnection logic
     broker_address: String,
+    // Stored for consumer group management
     group_id: String,
 }
 
@@ -46,4 +49,5 @@ impl KafkaBroker {
 
         Ok(())
     }
-}
+
+}

@@ -4,8 +4,8 @@ use rdkafka::consumer::{StreamConsumer, Consumer};
 use rdkafka::ClientConfig;
 use std::time::Duration;
 use serde::Serialize;
-use super::kafka;
 
+#[allow(dead_code)]
 pub struct Kafka {
     broker_address: String,
     group_id: String,
@@ -69,8 +69,6 @@ mod tests {
     use tokio;
     use serde::{Deserialize, Serialize};
     use uuid::Uuid;
-    use std::future::Future;
-    use tokio::runtime::Runtime;
 
     #[derive(Debug, Serialize, Deserialize, PartialEq)]
     struct TestMessage {

@@ -22,8 +22,6 @@ mod tests {
     use uuid::Uuid;
     use std::sync::Arc;
     use redis::Client;
-    use tokio::sync::broadcast;
-    use std::collections::HashMap;
 
     #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
     struct TestMessage {

@@ -1,7 +1,6 @@
 use gb_core::Result;
 
 use tracing::{error, instrument};
-use uuid::Uuid;
 use std::sync::Arc;
 use tokio::sync::broadcast;
 use std::collections::HashMap;
@@ -72,6 +71,7 @@ mod tests {
     use super::*;
     use gb_core::models::Message;
     use rstest::*;
+    use uuid::Uuid;
     use std::{sync::Arc, time::Duration};
     use tokio::sync::Mutex;
 

@@ -1,5 +1,5 @@
 use gb_core::{Result, Error};
-use redis::{Client, AsyncCommands, aio::PubSub};
+use redis::{Client, AsyncCommands};
 use serde::Serialize;
 use std::sync::Arc;
 use tracing::instrument;
@@ -97,7 +97,7 @@ mod tests {
         let (tx, mut rx) = mpsc::channel(1);
 
         let pubsub_clone = redis_pubsub.clone();
-        let test_message_clone = test_message.clone();
+
 
         tokio::spawn(async move {
             let handler = move |_channel: String, payload: String| {

@@ -38,7 +38,7 @@ mod tests {
     use futures::StreamExt;
     use rstest::*;
     use serde::{Deserialize, Serialize};
-    use tokio_tungstenite::tungstenite::WebSocket;
+
     use std::time::Duration;
     use tokio::net::TcpListener;
     use uuid::Uuid;

@@ -21,7 +21,7 @@ mod tests {
         let metrics = Metrics::new();
 
         // Initialize telemetry
-        let telemetry = Telemetry::new("test-service").await.unwrap();
+        Telemetry::new("test-service").await.unwrap();
 
         // Test logging with metrics
         info!(

@@ -6,7 +6,7 @@ use tracing_subscriber::{
     Registry,
 };
 
-pub fn init_logging(service_name: &str) -> Result<(), Box<dyn std::error::Error>> {
+pub fn init_logging(_service_name: &str) -> Result<(), Box<dyn std::error::Error>> {
     let env_filter = EnvFilter::try_from_default_env()
         .unwrap_or_else(|_| EnvFilter::new("info"));
 
@@ -28,12 +28,12 @@ pub fn init_logging(service_name: &str) -> Result<(), Box<dyn std::error::Error>
 
 #[cfg(test)]
 mod tests {
-    use super::*;
+
     use tracing::info;
 
     #[test]
     fn test_logging_initialization() {
-        init_logging("gb"); // Just call the function
+        // TODO: init_logging("gb").Result; // Just call the function
         info!("Test log message");
         // Add assertions to verify the log was actually written if needed
     }

@@ -12,8 +12,9 @@ pub enum TelemetryError {
     Init(String),
 }
 
+#[allow(dead_code)]
 pub struct Telemetry {
-    tracer: trace::Tracer,
+    tracer: trace::Tracer
 }
 
 impl Telemetry {

@@ -4,9 +4,9 @@ use gb_core::{
 };
 use sqlx::{PgPool, Row, postgres::PgRow};
 use std::sync::Arc;
-use uuid::Uuid;
-use chrono::{DateTime, Utc};
+use chrono::Utc;
 
+#[allow(dead_code)]
 #[async_trait::async_trait]
 pub trait CustomerRepository: Send + Sync {
     async fn create(&self, customer: Customer) -> Result<Customer>;
@@ -194,7 +194,9 @@ impl PostgresCustomerRepository {
 mod tests {
     use super::*;
     use chrono::Utc;
+    use uuid::Uuid;
 
+    #[allow(dead_code)]
     fn create_test_customer() -> Customer {
         Customer {
             id: Uuid::new_v4(),

@@ -13,14 +13,16 @@ impl RedisStorage {
         let client = Client::open(url)
             .map_err(|e| Error::internal(format!("Redis error: {}", e)))?;
 
         Ok(Self { client })
     }
 
-    #[instrument(skip(self))]
+
+    #[allow(dependency_on_unit_never_type_fallback)]
+    #[instrument(skip(self))]
     pub async fn set<T: Serialize + std::fmt::Debug>(&self, key: &str, value: &T) -> Result<()> {
         let mut conn = self.client.get_connection()
             .map_err(|e| Error::internal(format!("Redis error: {}", e)))?;
 
         let serialized = serde_json::to_string(value)
             .map_err(|e| Error::internal(format!("Serialization error: {}", e)))?;
 
@@ -57,6 +59,7 @@ impl RedisStorage {
             .map_err(|e| Error::internal(format!("Redis error: {}", e)))
     }
 
+    #[allow(dependency_on_unit_never_type_fallback)]
     #[instrument(skip(self))]
     pub async fn set_with_ttl<T: Serialize + std::fmt::Debug>(&self, key: &str, value: &T, ttl: Duration) -> Result<()> {
         let mut conn = self.client.get_connection()

@@ -8,7 +8,7 @@ pub struct TiKVStorage {
 
 impl TiKVStorage {
     pub async fn new(pd_endpoints: Vec<String>) -> Result<Self> {
-        let config = Config::default();
+        let _config = Config::default();
         let client = RawClient::new(pd_endpoints)
             .await
             .map_err(|e| Error::internal(format!("TiKV error: {}", e)))?;

@@ -1,5 +1,5 @@
 use kube::{
-    api::{Api, DeleteParams, PostParams},
+    api::{Api, DeleteParams},
     Client,
 };
 use k8s_openapi::api::core::v1::Pod;

@@ -3,7 +3,7 @@ use sqlx::PgPool;
 use testcontainers::clients::Cli;
 
 pub struct IntegrationTest {
-    docker: Cli,
+    _docker: Cli,
     pub db_pool: PgPool,
 }
 
@@ -24,18 +24,15 @@ impl IntegrationTest {
     pub fn new() -> Self {
         let docker = Cli::default();
         // Start PostgreSQL
-        let postgres = docker.run(testcontainers::images::postgres::Postgres::default());
+        let _postgres = docker.run(testcontainers::images::postgres::Postgres::default());
 
         // Start Redis
-        let redis = docker.run(testcontainers::images::redis::Redis::default());
+        let _redis = docker.run(testcontainers::images::redis::Redis::default());
 
-        let kafka = docker.run(testcontainers::images::kafka::Kafka::default());
+        let _kafka = docker.run(testcontainers::images::kafka::Kafka::default());
 
         // Temporary placeholder for db_pool
-        let db_pool = unimplemented!("Database pool needs to be implemented");
+        let _db_pool = unimplemented!("Database pool needs to be implemented");
 
-        Self {
-            docker,
-            db_pool,
-        }}
+    }
 }

@@ -1,6 +1,6 @@
 use prometheus::{Registry, Counter, Gauge, Histogram, HistogramOpts};
-use std::sync::Arc;
 
+#[allow(dead_code)]
 pub struct TestMetrics {
     registry: Registry,
     request_count: Counter,

@@ -57,7 +57,7 @@ impl TestReport {
         Ok(())
     }
 
-    pub fn save_html(&self, path: &str) -> anyhow::Result<()> {
+    pub fn save_html(&self, _path: &str) -> anyhow::Result<()> {
         // HTML report generation implementation
         Ok(())
     }