feat: implemented file logging
stefa168 committed Nov 21, 2023
1 parent 7d052e5 commit 1cebefb
Showing 7 changed files with 71 additions and 38 deletions.
22 changes: 7 additions & 15 deletions .gitignore
@@ -1,6 +1,12 @@
# Created by https://www.toptal.com/developers/gitignore/api/intellij,rust
# Edit at https://www.toptal.com/developers/gitignore?templates=intellij,rust

logs
timescale-data
.idea/dataSources.xml
.idea/git_toolbox_prj.xml
.idea/remote-targets.xml

### Intellij ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
@@ -31,19 +37,6 @@
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

@@ -130,5 +123,4 @@ Cargo.lock
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

# End of https://www.toptal.com/developers/gitignore/api/intellij,rust
/timescale-data/
# End of https://www.toptal.com/developers/gitignore/api/intellij,rust
1 change: 1 addition & 0 deletions .idea/growatt_server.iml

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions Cargo.toml
@@ -26,8 +26,11 @@ bytes = "1.5.0"
chrono = "0.4.31"
clap = { version = "4.4.8", features = ["color", "cargo"] }
anyhow = { version = "1.0.75", features = ["backtrace"] }

# Logging dependencies
tracing = { version = "0.1.40", features = ["attributes", "log"] }
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "chrono"] }
tracing-appender = "0.2.3"
tracing-futures = "0.2.5"
tracing-unwrap = "0.10.0"
tracing-panic = "0.1.1"
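
The new dependencies split the work: tracing supplies the instrumentation macros, tracing-subscriber the layered subscriber and EnvFilter, tracing-appender the rolling file writer, and tracing-panic plus tracing-unwrap route panics and Result/Option failures through the same pipeline. A minimal, self-contained sketch of those last two (the function name and values are illustrative, not taken from this repository):

use tracing_unwrap::ResultExt;

fn parse_port(raw: &str) -> u16 {
    // expect_or_log emits an error event through tracing before panicking,
    // unlike a plain expect() that only writes to stderr.
    raw.parse::<u16>().expect_or_log("invalid port in configuration")
}

fn main() {
    tracing_subscriber::fmt().init();
    // Record panics as tracing events too (the same hook this commit installs in main.rs).
    std::panic::set_hook(Box::new(tracing_panic::panic_hook));
    println!("listen port: {}", parse_port("5279"));
}
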
2 changes: 2 additions & 0 deletions config.yaml
@@ -1,3 +1,5 @@
log_level: INFO

database:
username: postgres
password: password
1 change: 1 addition & 0 deletions src/config.rs
@@ -8,6 +8,7 @@ pub struct Config {
pub database: DbConfig,
pub listen_port: Option<u16>,
pub remote_address: Option<String>,
pub logging_level: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
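
Worth noting: config.yaml gains the key log_level, while the struct field added here is logging_level. Unless a serde rename not visible in this diff maps one onto the other, the field will deserialize as None and the code in main.rs will fall back to its hard-coded "info" default. A small hypothetical sketch of such a mapping, assuming serde_yaml is what load_from_yaml uses:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct LoggingSection {
    // Accept the YAML key `log_level` while keeping the Rust name `logging_level`.
    #[serde(rename = "log_level")]
    logging_level: Option<String>,
}

fn main() {
    let cfg: LoggingSection = serde_yaml::from_str("log_level: INFO").unwrap();
    assert_eq!(cfg.logging_level.as_deref(), Some("INFO"));
}
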
3 changes: 1 addition & 2 deletions src/data_message.rs
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::f32;
use std::sync::Arc;
use tracing::{debug, info};
use tracing::debug;

#[derive(Debug)]
pub struct DataMessage {
@@ -29,7 +29,6 @@ impl DataMessage {
let mut data = HashMap::new();

let time = Local::now();
info!("{}", time);

for fragment in inverter_fragments.iter() {
let base_offset = fragment.offset as usize;
77 changes: 56 additions & 21 deletions src/main.rs
@@ -1,3 +1,4 @@
use crate::config::Config;
use anyhow::{Context, Result};
use clap::{arg, crate_authors, crate_description, crate_name, crate_version, Command};
use data_message::DataMessage;
@@ -9,6 +10,7 @@ use sqlx::PgPool;
use std::fmt::Write;
use std::io;
use std::net::SocketAddr;
use std::str::FromStr;
use std::sync::Arc;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};
@@ -18,6 +20,7 @@ use tokio::{fs, signal};
use tokio_util::sync::CancellationToken;
use tracing::level_filters::LevelFilter;
use tracing::{debug, error, info, instrument};
use tracing_appender::non_blocking::WorkerGuard;
use tracing_panic::panic_hook;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
@@ -54,28 +57,24 @@ pub struct GrowattV6EnergyFragment
#[tokio::main]
#[instrument]
async fn main() -> Result<()> {
let filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.with_env_var("LOG_LEVEL")
.from_env_lossy();

tracing_subscriber::registry()
.with(fmt::layer())
.with(filter)
.init();

std::panic::set_hook(Box::new(panic_hook));

info!("{} version {} started.", crate_name!(), crate_version!());

// First thing: load the arguments and configuration file.
let args = get_cli_conf().get_matches();

let config_path: &String = args.get_one("config_path").unwrap();
info!("Loading configuration from `{}`", config_path);

let config = config::load_from_yaml(config_path)
.await
.expect_or_log("Failed to load the configuration file");
.context("Failed to load the configuration file")?;

// Set up logging

let _logger_guard = init_logging(&config);

// Hook to also log panics with tracing
std::panic::set_hook(Box::new(panic_hook));

// Finally starting!
info!("{} version {} started.", crate_name!(), crate_version!());

let db_opts = PgConnectOptions::new()
.username(&config.database.username)
@@ -155,6 +154,28 @@ async fn main() -> Result<()> {
Ok(())
}

fn init_logging(config: &Config) -> WorkerGuard {
let base_logging = config.logging_level.clone().unwrap_or("info".to_string());
let base_logging = LevelFilter::from_str(&base_logging).unwrap();

let filter = EnvFilter::builder()
.with_default_directive(base_logging.into())
.with_env_var("LOG_LEVEL")
.from_env_lossy();

let file_appender = tracing_appender::rolling::daily("./logs", "growatt_server");
let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);

tracing_subscriber::registry()
.with(fmt::layer())
// .with(filter)
.with(filter)
.with(fmt::layer().with_writer(non_blocking))
.init();

_guard
}
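
The non-blocking writer configured in init_logging hands log lines to a background thread; the WorkerGuard it returns (stored as _logger_guard in main) has to stay alive until shutdown, or buffered lines are dropped on exit. The LOG_LEVEL environment variable can still override the level read from the config file via the EnvFilter. A stripped-down sketch of the file-writer part alone, assuming tracing-appender's default daily naming (files such as ./logs/growatt_server.2023-11-21):

use tracing_appender::non_blocking::WorkerGuard;

fn file_logger() -> WorkerGuard {
    // One file per day under ./logs, named growatt_server.<date>.
    let appender = tracing_appender::rolling::daily("./logs", "growatt_server");
    let (writer, guard) = tracing_appender::non_blocking(appender);

    // A single fmt layer writing to the file; the real init_logging above also
    // layers a console writer and the EnvFilter on top.
    tracing_subscriber::fmt().with_writer(writer).init();
    guard
}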

struct ConnectionHandler {
inverter: Arc<Vec<GrowattV6EnergyFragment>>,
db_pool: sqlx::Pool<sqlx::Postgres>,
@@ -165,18 +186,32 @@ impl ConnectionHandler {
async fn handle_data<'a>(&self, data: &'a [u8]) -> &'a [u8] {
let bytes = utils::unscramble_data(data);

let data_length = u16::from_be_bytes(bytes[4..6].try_into().unwrap());

fn byte_to_type(b: u8) -> String {
match b {
0x03 => "Data3".to_string(),
0x04 => "Data4".to_string(),
0x16 => "Ping".to_string(),
0x18 => "Configure".to_string(),
0x19 => "Identify".to_string(),
v => format!("Unknown ({})", v),
}
}

info!(
"New message! {}",
"New {} message received, {} bytes long.",
byte_to_type(bytes[7]),
data_length
);
debug!(
"Message data: {}",
bytes.iter().fold(String::new(), |mut output, b| {
write!(output, "{:02x}", b).unwrap();
output
})
);

let data_length = u16::from_be_bytes(bytes[4..6].try_into().unwrap());

debug!("Data length: {data_length} bytes");

let message = match bytes[7] {
0x03 => DataMessage::placeholder(&bytes, MessageType::Data3),
0x04 => DataMessage::data4(self.inverter.clone(), &bytes),
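
For reference, the frame layout handle_data relies on can be read off the code above: after unscrambling, a big-endian u16 length field sits at bytes 4..6 and a one-byte message type at byte 7 (0x03/0x04 data, 0x16 ping, 0x18 configure, 0x19 identify). An illustrative sketch of just that header read; the offsets come from this diff, not from an official protocol description:

fn parse_header(bytes: &[u8]) -> Option<(u16, u8)> {
    // Big-endian length at offset 4..6, message type at offset 7.
    let length = u16::from_be_bytes(bytes.get(4..6)?.try_into().ok()?);
    let msg_type = *bytes.get(7)?;
    Some((length, msg_type))
}

fn main() {
    // Hypothetical 8-byte header: length 0x0012 (18), type 0x16 (Ping).
    let frame = [0u8, 1, 0, 6, 0x00, 0x12, 0x00, 0x16];
    assert_eq!(parse_header(&frame), Some((18, 0x16)));
}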
