add trait Rolling
zhuxiujia committed Sep 19, 2023
1 parent 96f74c4 commit dbf9e5b
Showing 5 changed files with 107 additions and 81 deletions.
9 changes: 5 additions & 4 deletions src/config.rs
@@ -90,15 +90,16 @@ impl Config {
self
}
/// add a FileSplitAppender
pub fn file_split<P: Packer + 'static, R: Rolling + 'static>(
pub fn file_split<P: Packer + Sync + 'static, R: Rolling + 'static>(
self,
file_path: &str,
temp_size: LogSize,
rolling_type: R,
packer: P,
) -> Self {
self.appends.push(Mutex::new(Box::new(
FileSplitAppender::<RawFile>::new(file_path, temp_size, rolling_type, packer).unwrap(),
FileSplitAppender::<RawFile, P>::new(file_path, temp_size, rolling_type, packer)
.unwrap(),
)));
self
}
@@ -117,15 +118,15 @@ impl Config {
// LogPacker {},
// ),
// );
pub fn split<F: SplitFile + 'static, R: Rolling + 'static, P: Packer + 'static>(
pub fn split<F: SplitFile + 'static, R: Rolling + 'static, P: Packer + Sync + 'static>(
self,
file_path: &str,
temp_size: LogSize,
rolling_type: R,
packer: P,
) -> Self {
self.appends.push(Mutex::new(Box::new(
FileSplitAppender::<F>::new(file_path, temp_size, rolling_type, packer).unwrap(),
FileSplitAppender::<F, P>::new(file_path, temp_size, rolling_type, packer).unwrap(),
)));
self
}
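The `file_split` and `split` builders now take the rolling policy as a generic `R: Rolling` argument, and the packer must additionally be `Sync`. A minimal usage sketch of the new signature follows; it assumes the usual `fast_log::init` entry point, and the path, size limit, and `RollingNum` count are illustrative values, not part of this commit.

```rust
use fast_log::config::Config;
use fast_log::consts::LogSize;
use fast_log::plugin::packer::LogPacker;
use fast_log::plugin::rolling::RollingNum;

fn main() {
    // Split logs under target/logs/ once the temp file reaches 1 MB,
    // keep at most 5 packed files, and pack them with the plain LogPacker.
    fast_log::init(Config::new().file_split(
        "target/logs/",
        LogSize::MB(1),
        RollingNum { num: 5 },
        LogPacker {},
    ))
    .unwrap();
    log::info!("rolling appender ready");
}
```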
4 changes: 2 additions & 2 deletions src/plugin/file_loop.rs
@@ -7,13 +7,13 @@ use crate::plugin::rolling::RollingNum;

/// A single log file stored in rolling mode, bounded by capacity
pub struct FileLoopAppender<F: SplitFile> {
file: FileSplitAppender<F>,
file: FileSplitAppender<F, LogPacker>,
}

impl<F: SplitFile> FileLoopAppender<F> {
pub fn new(log_file_path: &str, size: LogSize) -> Result<FileLoopAppender<F>, LogError> {
Ok(Self {
file: FileSplitAppender::<F>::new(
file: FileSplitAppender::<F, LogPacker>::new(
log_file_path,
size,
RollingNum { num: 1 },
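`FileLoopAppender` now fixes its packer type to `LogPacker` (`FileSplitAppender<F, LogPacker>`), so its public constructor is unchanged: callers still pick only the `SplitFile` backend and the capacity. A hedged construction sketch, assuming the `file_loop` module is public like the other plugin modules, with an illustrative path and size:

```rust
use fast_log::consts::LogSize;
use fast_log::plugin::file_loop::FileLoopAppender;
use fast_log::plugin::file_split::RawFile;

fn main() {
    // The generic parameter selects only the file backend; after this change
    // the packer is fixed to LogPacker inside FileLoopAppender itself.
    let _appender = FileLoopAppender::<RawFile>::new("target/logs/log.log", LogSize::MB(1))
        .expect("create loop appender");
}
```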
148 changes: 83 additions & 65 deletions src/plugin/file_split.rs
@@ -2,12 +2,15 @@ use crate::appender::{Command, FastLogRecord, LogAppender};
use crate::consts::LogSize;
use crate::error::LogError;
use crate::{chan, Receiver, Sender};
use dark_std::errors::new;
use fastdate::DateTime;
use std::cell::RefCell;
use std::fs::{DirEntry, File, OpenOptions};
use std::io::{Seek, SeekFrom, Write};
use std::ops::Deref;
use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;

pub trait SplitFile: Send {
@@ -95,12 +98,72 @@ pub trait Packer: Send {
fn retry(&self) -> i32 {
return 0;
}

fn create_log_name(&self, first_file_path: &str) -> String {
let path = Path::new(first_file_path);
let file_name = path
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_string();
let mut new_log_name = file_name.to_string();
let point = file_name.rfind(".");
match point {
None => {
new_log_name.push_str(
&DateTime::now()
.to_string()
.replace(" ", "T")
.replace(":", "-"),
);
}
Some(i) => {
let (name, ext) = file_name.split_at(i);
new_log_name = format!(
"{}{}{}",
name,
DateTime::now()
.to_string()
.replace(" ", "T")
.replace(":", "-"),
ext
);
}
}
new_log_name = first_file_path.trim_end_matches(&file_name).to_string() + &new_log_name;
return new_log_name;
}

fn parse_log_name(&self, file_name: &str, temp_name: &str) -> Result<DateTime, LogError> {
let path = Path::new(file_name);
let file_name = path
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_string();
if file_name.starts_with(&temp_name) {
let mut time_str = file_name.trim_start_matches(&temp_name).to_string();
if let Some(v) = time_str.rfind(".") {
time_str = time_str[0..v].to_string();
}
let time = DateTime::parse("YYYY-MM-DDThh:mm:ss.000000", &time_str);
return time.map_err(|e| LogError::from(e.to_string()));
} else {
return Err(LogError::E(format!(
"file_name={} not an pack file",
file_name
)));
}
}
}

/// split-log-file appender that allows packing/compressing logs
/// trades memory space for running time, reducing repeated IO queries
pub struct FileSplitAppender<F: SplitFile> {
pub struct FileSplitAppender<F: SplitFile, P: Packer> {
file: F,
packer: Arc<P>,
dir_path: String,
sender: Sender<LogPack>,
temp_size: LogSize,
@@ -109,13 +172,13 @@ pub struct FileSplitAppender<F: SplitFile> {
temp_name: String,
}

impl<F: SplitFile> FileSplitAppender<F> {
pub fn new<P: Packer + 'static, R: Rolling + 'static>(
impl<F: SplitFile, P: Packer + Sync + 'static> FileSplitAppender<F, P> {
pub fn new<R: Rolling + 'static>(
file_path: &str,
temp_size: LogSize,
rolling: R,
packer: P,
) -> Result<FileSplitAppender<F>, LogError> {
) -> Result<FileSplitAppender<F, P>, LogError> {
let temp_name = {
let buf = Path::new(&file_path);
let mut name = if buf.is_file() {
@@ -153,14 +216,16 @@ impl<F: SplitFile> FileSplitAppender<F> {
temp_bytes.store(offset, Ordering::Relaxed);
file.seek(SeekFrom::Start(temp_bytes.load(Ordering::Relaxed) as u64));
let (sender, receiver) = chan(None);
spawn_saver(temp_name.clone(), receiver, rolling, packer);
let arc_packer = Arc::new(packer);
spawn_saver(temp_name.clone(), receiver, rolling, arc_packer.clone());
Ok(Self {
temp_bytes,
dir_path: dir_path.to_string(),
file,
sender,
temp_size,
temp_name,
packer: arc_packer,
})
}
/// send data to make a pack, and truncate data when finished.
@@ -170,38 +235,7 @@ impl<F: SplitFile> FileSplitAppender<F> {
sp = "/";
}
let first_file_path = format!("{}{}{}", self.dir_path, sp, &self.temp_name);
let path = Path::new(&first_file_path);
let file_name = path
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_string();
let mut new_log_name = file_name.to_string();
let point = file_name.rfind(".");
match point {
None => {
new_log_name.push_str(
&DateTime::now()
.to_string()
.replace(" ", "T")
.replace(":", "-"),
);
}
Some(i) => {
let (name, ext) = file_name.split_at(i);
new_log_name = format!(
"{}{}{}",
name,
DateTime::now()
.to_string()
.replace(" ", "T")
.replace(":", "-"),
ext
);
}
}
new_log_name = first_file_path.trim_end_matches(&file_name).to_string() + &new_log_name;
let new_log_name = self.packer.create_log_name(&first_file_path);
self.file.flush();
std::fs::copy(&first_file_path, &new_log_name);
self.sender.send(LogPack {
@@ -285,29 +319,10 @@ pub trait Rolling: Send {
}
return vec![];
}

/// parse `temp2023-07-20T10-13-17.452247.log`
fn file_name_parse_time(name: &str, temp_name: &str) -> Option<DateTime>
where
Self: Sized,
{
let base_name = get_base_name(&Path::new(temp_name));
if name.starts_with(&base_name) {
let mut time_str = name.trim_start_matches(&base_name).to_string();
if let Some(v) = time_str.rfind(".") {
time_str = time_str[0..v].to_string();
}
let time = DateTime::parse("YYYY-MM-DDThh:mm:ss.000000", &time_str);
if let Ok(time) = time {
return Some(time);
}
}
return None;
}
}

///rolling keep type
#[deprecated(note = "use RollingAll,RollingNum,RollingTime replace this")]
#[deprecated(note = "use RollingAll,RollingNum,RollingDuration replace this")]
#[derive(Copy, Clone, Debug)]
pub enum RollingType {
/// keep All of log packs
@@ -342,10 +357,13 @@ impl Rolling for RollingType {
let item = &paths_vec[index];
let file_name = item.file_name();
let name = file_name.to_str().unwrap_or("").to_string();
if let Some(time) = Self::file_name_parse_time(&name, temp_name) {
if now.clone().sub(duration.clone()) > time {
std::fs::remove_file(item.path());
removed += 1;
if let Ok(m) = item.metadata() {
if let Ok(c) = m.created() {
let time = DateTime::from(c);
if now.clone().sub(duration.clone()) > time {
std::fs::remove_file(item.path());
removed += 1;
}
}
}
}
@@ -356,7 +374,7 @@ impl Rolling for RollingType {
}
}

impl<F: SplitFile> LogAppender for FileSplitAppender<F> {
impl<F: SplitFile, P: Packer + Sync + 'static> LogAppender for FileSplitAppender<F, P> {
fn do_logs(&self, records: &[FastLogRecord]) {
//if temp_bytes is full, a pack must be sent
let mut temp = String::with_capacity(records.len() * 10);
@@ -407,11 +425,11 @@ impl<F: SplitFile> LogAppender for FileSplitAppender<F> {
}

///spawn a saver thread that saves the log file or zip file
fn spawn_saver<P: Packer + 'static, R: Rolling + 'static>(
fn spawn_saver<P: Packer + Sync + 'static, R: Rolling + 'static>(
temp_name: String,
r: Receiver<LogPack>,
rolling: R,
packer: P,
packer: Arc<P>,
) {
std::thread::spawn(move || {
loop {
@@ -420,7 +438,7 @@ fn spawn_saver<P: Packer + 'static, R: Rolling + 'static>(
rolling.do_rolling(&pack.dir, &temp_name);
let log_file_path = pack.new_log_name.clone();
//do save pack
let remove = pack.do_pack(&packer);
let remove = pack.do_pack(packer.deref());
if let Ok(remove) = remove {
if remove {
std::fs::remove_file(log_file_path);
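With the new `Rolling` trait, a custom retention policy only has to implement `do_rolling`; the provided `read_paths` helper (used by `RollingType` above and by `RollingNum`/`RollingDuration` in `src/plugin/rolling.rs`) lists the packed files for it. A rough sketch of a hypothetical implementation, assuming `read_paths` returns the packed log files in the directory:

```rust
use fast_log::plugin::file_split::Rolling;

/// Hypothetical policy: keep no packed logs at all.
struct RollingNone;

impl Rolling for RollingNone {
    fn do_rolling(&self, dir: &str, temp_name: &str) -> i64 {
        let mut removed = 0;
        // read_paths is the provided trait helper also used by RollingNum/RollingDuration.
        for item in self.read_paths(dir, temp_name) {
            let _ = std::fs::remove_file(item.path());
            removed += 1;
        }
        removed
    }
}
```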
18 changes: 12 additions & 6 deletions src/plugin/rolling.rs
@@ -2,13 +2,15 @@ use crate::plugin::file_split::Rolling;
use fastdate::DateTime;
use std::time::Duration;

/// keeps all packs, does no rolling
pub struct RollingAll {}
impl Rolling for RollingAll {
fn do_rolling(&self, dir: &str, temp_name: &str) -> i64 {
0
}
}

/// rolling by file count
pub struct RollingNum {
pub num: i64,
}
@@ -28,11 +30,12 @@ impl Rolling for RollingNum {
}
}

pub struct RollingTime {
/// rolling by file age, based on creation time from file metadata
pub struct RollingDuration {
pub duration: Duration,
}

impl Rolling for RollingTime {
impl Rolling for RollingDuration {
fn do_rolling(&self, dir: &str, temp_name: &str) -> i64 {
let mut removed = 0;
let paths_vec = self.read_paths(dir, temp_name);
@@ -41,10 +44,13 @@ impl Rolling for RollingTime {
let item = &paths_vec[index];
let file_name = item.file_name();
let name = file_name.to_str().unwrap_or("").to_string();
if let Some(time) = Self::file_name_parse_time(&name, temp_name) {
if now.clone().sub(self.duration.clone()) > time {
std::fs::remove_file(item.path());
removed += 1;
if let Ok(m) = item.metadata() {
if let Ok(c) = m.created() {
let time = DateTime::from(c);
if now.clone().sub(self.duration.clone()) > time {
std::fs::remove_file(item.path());
removed += 1;
}
}
}
}
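`RollingTime` is renamed to `RollingDuration`, and the age check now uses each packed file's creation time from its metadata instead of parsing a timestamp out of the file name. A hedged configuration sketch; the seven-day window, path, and size are illustrative:

```rust
use std::time::Duration;

use fast_log::config::Config;
use fast_log::consts::LogSize;
use fast_log::plugin::packer::LogPacker;
use fast_log::plugin::rolling::RollingDuration;

fn main() {
    // Remove packed logs whose creation time is older than 7 days.
    fast_log::init(Config::new().file_split(
        "target/logs/",
        LogSize::MB(10),
        RollingDuration {
            duration: Duration::from_secs(7 * 24 * 60 * 60),
        },
        LogPacker {},
    ))
    .unwrap();
}
```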
9 changes: 5 additions & 4 deletions tests/split_test.rs
@@ -2,7 +2,7 @@
mod test {
use fast_log::appender::{Command, FastLogRecord, LogAppender};
use fast_log::consts::LogSize;
use fast_log::plugin::file_split::{FileSplitAppender, RawFile, Rolling};
use fast_log::plugin::file_split::{FileSplitAppender, Packer, RawFile, Rolling};
use fast_log::plugin::packer::LogPacker;
use fast_log::plugin::rolling::{RollingAll, RollingNum};
use log::Level;
@@ -13,7 +13,7 @@ mod test {
#[test]
fn test_send_pack() {
let _ = remove_dir_all("target/test/");
let appender = FileSplitAppender::<RawFile>::new(
let appender = FileSplitAppender::<RawFile, LogPacker>::new(
"target/test/",
LogSize::MB(1),
RollingAll {},
@@ -39,8 +39,9 @@
}

#[test]
fn test_file_name_parse_time() {
let t = RollingAll::file_name_parse_time("temp2023-07-20T10-13-17.452247.log", "temp.log")
fn test_parse_log_name() {
let t = LogPacker {}
.parse_log_name("temp2023-07-20T10-13-17.452247.log", "temp.log")
.unwrap();
assert_eq!(t.to_string(), "2023-07-20 10:13:17.452247");
}
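The renamed test covers `Packer::parse_log_name`; its counterpart `create_log_name`, also added on the trait in this commit, builds such names by splicing the current timestamp in front of the extension. A small sketch of that half of the round trip, with an illustrative input path:

```rust
use fast_log::plugin::file_split::Packer;
use fast_log::plugin::packer::LogPacker;

fn main() {
    // e.g. "target/logs/temp.log" -> "target/logs/temp2023-09-19T12-00-00.123456.log"
    let name = LogPacker {}.create_log_name("target/logs/temp.log");
    println!("{}", name);
}
```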
