├── .gitignore
├── Cargo.toml
├── .github
│   └── workflows
│       └── rust.yml
├── src
│   ├── args.rs
│   ├── main.rs
│   ├── registry.rs
│   └── fsevents.rs
└── README.md

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Generated by Cargo
# will have compiled files and executables
/target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "fsevents_parser_rs"
version = "0.1.1"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
clap = { version = "3.2.22", features = ["derive"] }
regex = "1.6.0"
flate2 = "1.0.24"
bitflags = "1.3.2"
serde = { version = "1.0.145", features = ["derive"] }
serde_json = "1.0.85"
csv = "1.1.6"
rusqlite = { version = "0.28.0", features = ["bundled"] }

--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
name: Build Test

on:
  push:
    branches: [ "main", "develop" ]
  pull_request:
    branches: [ "main" ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: macos-latest

    steps:
      - uses: actions/checkout@v4

      - name: Install latest nightly
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
          components: rustfmt, clippy

      - name: Build
        run: cargo build --verbose

--------------------------------------------------------------------------------
/src/args.rs:
--------------------------------------------------------------------------------
pub use clap::Parser;
use std::fs;

#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
pub struct ArgParse {
    #[clap(
        short,
        long,
        value_parser,
        default_value = "/System/Volumes/Data/.fseventsd"
    )]
    pub input_path: String,

    #[clap(short, long, value_parser, default_value = "./output.json")]
    pub output_path: String,

    #[clap(short, long, value_enum, default_value_t = ArgsOutputFormat::Json)]
    pub format: ArgsOutputFormat,
}

#[derive(clap::ValueEnum, Clone, Debug)]
pub enum ArgsOutputFormat {
    Json,
    Csv,
    Sqlite,
}

pub fn validate_args(args: &ArgParse) -> bool {
    // the input path must be a readable directory
    if let Err(err) = fs::read_dir(&args.input_path) {
        println!("invalid input path: {}", err);
        return false;
    }

    // remove any stale output file; only a missing file is acceptable
    if let Err(err) = fs::remove_file(&args.output_path) {
        if err.kind() != std::io::ErrorKind::NotFound {
            println!("failed to remove legacy output: {}", err);
            return false;
        }
    }

    true
}

--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
mod args;
use args::*;

mod fsevents;
mod registry;
use registry::Registry;

fn main() {
    // get args
    let args = ArgParse::parse();
    if !validate_args(&args) {
        return;
    }

    // find all archives in the fseventsd directory
    let archive_files = fsevents::find_archives(&args.input_path);
    if archive_files.is_empty() {
        println!("no valid archive found in input directory, exiting");
        return;
    }
    println!(
        "found {} archives in {}",
        archive_files.len(),
        args.input_path
    );

    parse_and_export(&archive_files, &args.output_path, args.format);
}

fn parse_and_export(archive_files: &[String], output_path: &str, format: ArgsOutputFormat) {
    // create a registry for the requested output format
    let mut reg: Box<dyn Registry> = match format {
        ArgsOutputFormat::Json => match registry::json::JsonRegistry::new(output_path) {
            Ok(r) => r,
            Err(e) => {
                println!("failed to create json registry: {}", e);
                return;
            }
        },
        ArgsOutputFormat::Csv => match registry::csv::CsvRegistry::new(output_path) {
            Ok(r) => r,
            Err(e) => {
                println!("failed to create csv registry: {}", e);
                return;
            }
        },
        ArgsOutputFormat::Sqlite => match registry::sqlite::SqliteRegistry::new(output_path) {
            Ok(r) => r,
            Err(e) => {
                println!("failed to create sqlite registry: {}", e);
                return;
            }
        },
    };

    // parse each archive and hand it to the registry
    archive_files.iter().for_each(|f| {
        if let Some(archive) = fsevents::parse_archive(f) {
            println!("---------- {} ----------", archive.filename);
            println!("page count: {}", archive.pages.len());
            archive.pages.iter().for_each(|p| {
                println!("entry count: {}", p.entries.len());
            });

            reg.export_archive(&archive);
        }
    });
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# FSEventsParser-rs
Yet another fseventsd log parser for forensics.
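
## Build
A plain Cargo build should be all that is needed; rusqlite's `bundled` feature compiles SQLite from source, so no system SQLite development package is required:
```bash
cargo build --release
```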

## Usage
Currently supports three output formats: JSON, CSV, and SQLite (recommended).
```bash
% ./fsevents_parser_rs -h
fsevents_parser_rs 0.1.0

USAGE:
    fsevents_parser_rs [OPTIONS]

OPTIONS:
    -f, --format <FORMAT>              [default: json] [possible values: json, csv, sqlite]
    -h, --help                         Print help information
    -i, --input-path <INPUT_PATH>      [default: /System/Volumes/Data/.fseventsd]
    -o, --output-path <OUTPUT_PATH>    [default: ./output.json]
    -V, --version                      Print version information
```

Using the JSON output format:
```bash
% sudo ./fsevents_parser_rs
found 21 archives in /System/Volumes/Data/.fseventsd
---------- 0000000000089492 ----------
page count: 2
entry count: 2049
entry count: 681
......
......
---------- 000000000004c323 ----------
page count: 2
entry count: 1850
entry count: 880
% cat ./output.json | tail -n 3
{"path":"private/var/log/DiagnosticMessages/StoreData\u0000","id":308039,"flags":"FSE_CONTENT_MODIFIED | FSE_IS_FILE","create_ts":1664093703,"modify_ts":1664093703,"source":"000000000004c323"}
{"path":"private/var/log/system.log\u0000","id":308036,"flags":"FSE_CONTENT_MODIFIED | FSE_IS_FILE","create_ts":1664093703,"modify_ts":1664093703,"source":"000000000004c323"}
{"path":"private/var/root/Library/Logs/Bluetooth/bluetoothd-hci-latest.pklg\u0000","id":309733,"flags":"FSE_CONTENT_MODIFIED | FSE_IS_FILE","create_ts":1664093703,"modify_ts":1664093703,"source":"000000000004c323"}
```
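
The JSON output is one record per line, so it can be post-processed with standard tools. For example, pulling out deletion events with `jq` (assuming it is installed) might look like:
```bash
jq -c 'select(.flags | contains("FSE_DELETE"))' ./output.json
```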

Using the SQLite output format:
```
% sudo ./target/debug/fsevents_parser_rs -o ./output.sqlite -f sqlite
found 8 archives in /System/Volumes/Data/.fseventsd
---------- 000000000000c760 ----------
page count: 2
entry count: 1947
entry count: 783
......
......
---------- 000000000001186b ----------
page count: 2
entry count: 2217
entry count: 513
% sqlite3 ./output.sqlite 'select * from record;' | tail -n 3
private/var/run/utmpx|4613|FSE_CONTENT_MODIFIED | FSE_IS_FILE|1664298667|1664298667|000000000000489c
private/var/sntpd/state.bin|15973|FSE_STAT_CHANGED | FSE_IS_FILE|1664298667|1664298667|000000000000489c
private/var/tmp/kernel_panics|4276|FSE_CHOWN | FSE_IS_DIR|1664298667|1664298667|000000000000489c
```
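
The `record` table makes ad-hoc triage queries straightforward. For instance, to list which archives recorded changes to a path of interest (the path pattern here is only illustrative):
```bash
% sqlite3 ./output.sqlite "select path, flags, source from record where path like '%system.log%';"
```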

## References
[FSEventsParser](https://github.com/dlcowen/FSEventsParser)

[MacOS File System Events Disk Log Stream format](https://github.com/libyal/dtformats/blob/main/documentation/MacOS%20File%20System%20Events%20Disk%20Log%20Stream%20format.asciidoc)

[macos-fseventsd](https://github.com/puffyCid/macos-fseventsd)

--------------------------------------------------------------------------------
/src/registry.rs:
--------------------------------------------------------------------------------
use crate::fsevents::Archive;

pub trait Registry {
    fn export_archive(&mut self, archive: &Archive) -> bool;
}

pub mod json {

    use std::fs;
    use std::io::Write;
    use std::time::UNIX_EPOCH;

    use serde::Serialize;
    use serde_json;

    use crate::fsevents::Archive;
    use crate::registry::Registry;

    pub struct JsonRegistry {
        pub written_count: usize,

        fd: fs::File,
    }

    #[derive(Serialize)]
    struct JsonRecord {
        path: String,  // record path
        id: u64,       // record id
        flags: String, // flag description

        create_ts: u64,
        modify_ts: u64,
        source: String, // source archive file name
    }

    impl JsonRegistry {
        pub fn new(path: &str) -> Result<Box<JsonRegistry>, std::io::Error> {
            Ok(Box::new(JsonRegistry {
                written_count: 0,
                fd: fs::File::create(path)?,
            }))
        }
    } // impl JsonRegistry

    impl Registry for JsonRegistry {
        fn export_archive(&mut self, archive: &Archive) -> bool {
            for page in archive.pages.iter() {
                for entry in page.entries.iter() {
                    let json_record = JsonRecord {
                        path: String::from(&entry.full_path),
                        id: entry.event_id,
                        flags: format!("{:?}", entry.flags),

                        create_ts: archive
                            .ctime
                            .duration_since(UNIX_EPOCH)
                            .unwrap_or_default()
                            .as_secs(),
                        modify_ts: archive
                            .mtime
                            .duration_since(UNIX_EPOCH)
                            .unwrap_or_default()
                            .as_secs(),
                        source: String::from(&archive.filename),
                    };

                    if let Ok(j) = serde_json::to_string(&json_record) {
                        if let Err(e) = self.fd.write_all(j.as_bytes()) {
                            println!("failed to write json record: {}", e);
                            break;
                        }
                        let _ = self.fd.write_all(b"\n");
                        self.written_count += 1;
                    }
                }
            }

            true
        }
    } // impl Registry for JsonRegistry
} // mod json

pub mod csv {

    use std::fs;
    use std::time::UNIX_EPOCH;

    use csv;

    use crate::fsevents::Archive;
    use crate::registry::Registry;

    pub struct CsvRegistry {
        pub written_count: usize,

        writer: csv::Writer<fs::File>,
    }

    impl CsvRegistry {
        pub fn new(path: &str) -> Result<Box<CsvRegistry>, std::io::Error> {
            Ok(Box::new(CsvRegistry {
                written_count: 0,
                writer: csv::Writer::from_path(path)?,
            }))
        }
    } // impl CsvRegistry

    impl Registry for CsvRegistry {
        fn export_archive(&mut self, archive: &Archive) -> bool {
            for page in archive.pages.iter() {
                for entry in page.entries.iter() {
                    let csv_record = (
                        &entry.full_path,
                        &entry.event_id,
                        format!("{:?}", entry.flags),
                        archive
                            .ctime
                            .duration_since(UNIX_EPOCH)
                            .unwrap_or_default()
                            .as_secs(),
                        archive
                            .mtime
                            .duration_since(UNIX_EPOCH)
                            .unwrap_or_default()
                            .as_secs(),
                        &archive.filename,
                    );

                    if let Err(e) = self.writer.serialize(csv_record) {
                        println!("failed to serialize record to csv: {}", e);
                        continue;
                    }
                    if let Err(e) = self.writer.flush() {
                        println!("failed to write record to file: {}", e);
                        break;
                    }
                    self.written_count += 1;
                }
            }

            true
        }
    } // impl Registry for CsvRegistry
} // mod csv

pub mod sqlite {

    use std::time::UNIX_EPOCH;

    use rusqlite;

    use crate::fsevents::Archive;
    use crate::registry::Registry;

    pub struct SqliteRegistry {
        pub written_count: usize,

        conn: rusqlite::Connection,
    }

    impl SqliteRegistry {
        pub fn new(path: &str) -> Result<Box<SqliteRegistry>, rusqlite::Error> {
            let conn = rusqlite::Connection::open(path)?;
            conn.execute(
                "CREATE TABLE record (
                    path TEXT,
                    id TEXT NOT NULL,
                    flags TEXT,
                    create_ts INTEGER,
                    modify_ts INTEGER,
                    source TEXT NOT NULL
                )",
                (),
            )?;

            Ok(Box::new(SqliteRegistry {
                written_count: 0,
                conn,
            }))
        }
    } // impl SqliteRegistry

    impl Registry for SqliteRegistry {
        fn export_archive(&mut self, archive: &Archive) -> bool {
            // start transaction
            let txn = match self.conn.transaction() {
                Ok(t) => t,
                Err(e) => {
                    println!("failed to create transaction: {}", e);
                    return false;
                }
            };

            let mut inserted = 0;
            for page in archive.pages.iter() {
                for entry in page.entries.iter() {
                    if let Err(e) = txn.execute(
                        "INSERT INTO record (
                            path, id, flags, create_ts, modify_ts, source)
                            VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
                        (
                            &entry.full_path,
                            &entry.event_id.to_string(),
                            format!("{:?}", entry.flags),
                            archive
                                .ctime
                                .duration_since(UNIX_EPOCH)
                                .unwrap_or_default()
                                .as_secs(),
                            archive
                                .mtime
                                .duration_since(UNIX_EPOCH)
                                .unwrap_or_default()
                                .as_secs(),
                            &archive.filename,
                        ),
                    ) {
                        println!("failed to insert record: {}", e);
                        continue;
                    }
                    inserted += 1;
                }
            }

            // end transaction
            if let Err(e) = txn.commit() {
                println!("failed to commit transaction: {}", e);
                return false;
            }
            self.written_count += inserted;

            true
        }
    } // impl Registry for SqliteRegistry
} // mod sqlite

--------------------------------------------------------------------------------
/src/fsevents.rs:
--------------------------------------------------------------------------------
use regex::Regex;
use std::io::Read;
use std::{fmt, fs};

use bitflags::bitflags;
use flate2::read::MultiGzDecoder;

pub fn find_archives(dir: &str) -> Vec<String> {
    // archive file names are 16 hexadecimal digits
    let fname_re = Regex::new("^[0-9a-f]{16}$").unwrap();

    if let Ok(dir_result) = fs::read_dir(dir) {
        return dir_result
            .into_iter()
            .filter_map(|s| {
                // keep only regular files whose name matches the pattern
                let ss = s.ok()?;
                if fname_re.is_match(ss.file_name().to_str()?) && ss.metadata().ok()?.is_file() {
                    Some(String::from(ss.path().to_str()?))
                } else {
                    None
                }
            })
            .collect::<Vec<String>>();
    }

    vec![] // failed to read dir
}

pub fn parse_archive(file_path: &str) -> Option<Archive> {
    // parse from compressed file
    match Archive::new(file_path) {
        Ok(archive) => {
            if archive.pages.is_empty() {
                println!("archive contains no pages");
                return None;
            }

            // println!("parse archive {} succeeded, page count: {}",
            //     archive.filename, archive.pages.len());
            Some(archive)
        }
        Err(e) => {
            println!("failed to parse: {:?}", e);
            None
        }
    }
}

#[derive(Debug)]
pub struct Archive {
    pub pages: Vec<Page>,

    pub filename: String,
    pub mtime: std::time::SystemTime,
    pub ctime: std::time::SystemTime,
}

impl Archive {
    pub fn new(path: &str) -> Result<Self, Box<dyn std::error::Error>> {
        // timestamp & filename
        let metadata = fs::metadata(path)?;
        let filename = std::path::Path::new(path)
            .file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("")
            .to_string();

        /*
         * pages
         */
        // uncompress (archives may contain multiple concatenated gzip members)
        let mut buf = Vec::new();
        let fd = fs::File::open(path)?;
        let mut decoder = MultiGzDecoder::new(fd);
        decoder.read_to_end(&mut buf)?;
        // println!("uncompressed size: {} {}", filename, buf.len());

        // parse all pages
        let mut pages = vec![];
        let mut offset: usize = 0;
        while offset < buf.len() {
            match Page::new(&buf[offset..]) {
                Ok((page, consumed)) => {
                    offset += consumed;
                    // println!("parse page succeeded: {:?}, entry count: {}, page consumed: {}, stream left: {}",
                    //     page.header, page.entries.len(), consumed, buf.len() - offset);
                    pages.push(page);
                }
                Err(e) => {
                    println!(
                        "encountered error when parsing page, move to next archive: {:?}",
                        e
                    );
                    break;
                }
            }
        }

        Ok(Archive {
            pages,
            filename,
            mtime: metadata.modified()?,
            ctime: metadata.created()?,
        })
    }
} // impl Archive

#[derive(Debug)]
pub struct Page {
    pub header: PageHeader,
    pub entries: Vec<Entry>,
}

impl Page {
    // returns the parsed page and the number of bytes consumed from `mem`
    pub fn new(mem: &[u8]) -> Result<(Self, usize), Box<dyn std::error::Error>> {
        // find the page magic
        let page_start = match mem
            .windows(4)
            .position(|window| window == b"1SLD" || window == b"2SLD" || window == b"3SLD")
        {
            Some(pos) => pos,
            None => return Err(Box::new(ParseError::NoPageFound)),
        };

        // parse header
        let header = PageHeader::new(&mem[page_start..])?;
        if matches!(header.version, Version::Unknown) {
            return Err(Box::new(ParseError::UnsupportedVersion));
        }

        // parse entries by length
        // println!("parsing entries in page, size: {}", header.stream_size);
        let mut offset = page_start + PageHeader::len(); // skip header
        let page_end = page_start + header.stream_size as usize; // stream_size counts from the magic
        let mut entries = vec![];
        while offset < page_end && offset < mem.len() - 1 {
            if let Some(path_len) = mem[offset..].iter().position(|&r| r == 0) {
                /*
                 * | full path   | ends with 0x00
                 * | event id    | 8 bytes
                 * | event flags | 4 bytes
                 * | node id     | 8 bytes (version >= v2)
                 * | unknown     | 4 bytes (version >= v3)
                 */

                // the path may be empty, in which case offset == end_offset
                let end_offset = offset + path_len;
                let tail_len = match header.version {
                    Version::V3 => 24,
                    Version::V2 => 20,
                    _ => 12,
                };
                if end_offset + tail_len >= mem.len() {
                    // not enough bytes left for the fixed-size attributes
                    println!(
                        "invalid record for path, stop parsing page: {:?}",
                        &mem[offset..end_offset + 1]
                    );
                    break;
                }

                let full_path = String::from_utf8_lossy(&mem[offset..end_offset]).into_owned();
                offset = end_offset + 1; // skip the 0x00 terminator
                // println!("found path: {}", full_path);

                // event id
                let event_id = u64::from_le_bytes(mem[offset..offset + 8].try_into()?);
                offset += 8;
                // println!("event id: {}", event_id);

                // flags
                let flags = u32::from_le_bytes(mem[offset..offset + 4].try_into()?);
                offset += 4;
                // println!("event flags: {}", flags);

                // skip node id (v2+) and the unknown column (v3)
                match header.version {
                    Version::V2 => offset += 8,
                    Version::V3 => offset += 12,
                    _ => {}
                }

                // new entry generated
                entries.push(Entry {
                    full_path,
                    event_id,
                    flags: EventFlag::from_bits_truncate(flags),
                });
            } else {
                // no 0x00 terminator left in the stream
                offset = mem.len();
                break;
            }
        }

        Ok((Page { header, entries }, offset)) // mem len actually consumed
    }
} // impl Page

#[derive(Debug)]
pub struct PageHeader {
    version: Version,
    stream_size: u32,
}

#[derive(Debug)]
pub enum Version {
    Unknown,
    V1,
    V2,
    V3,
}

impl PageHeader {
    pub fn new(mem: &[u8]) -> Result<Self, Box<dyn std::error::Error>> {
        // validate len
        if mem.len() < Self::len() {
            return Err(Box::new(ParseError::InvalidHeader));
        }

        // parse version from the 4-byte magic; Unknown is reported to the
        // caller, which maps it to ParseError::UnsupportedVersion
        let version = if mem.starts_with(b"1SLD") {
            Version::V1
        } else if mem.starts_with(b"2SLD") {
            Version::V2
        } else if mem.starts_with(b"3SLD") {
            Version::V3
        } else {
            Version::Unknown
        };

        // parse the page size (little-endian u32 at offset 8)
        let len = u32::from_le_bytes(mem[8..12].try_into()?);
        if len as usize > mem.len() {
            return Err(Box::new(ParseError::InvalidHeader));
        }

        Ok(PageHeader {
            version,
            stream_size: len,
        })
    }

    pub fn len() -> usize {
        12
    }
} // impl PageHeader

#[derive(Debug)]
pub struct Entry {
    pub full_path: String,
    pub event_id: u64,
    pub flags: EventFlag,
}

bitflags! {
    pub struct EventFlag : u32 {
        const FSE_NONE = 0x00000000;

        const FSE_CREATE_FILE = 0x00000001;
        const FSE_DELETE = 0x00000002;
        const FSE_STAT_CHANGED = 0x00000004;
        const FSE_RENAME = 0x00000008;
        const FSE_CONTENT_MODIFIED = 0x00000010;
        const FSE_EXCHANGE = 0x00000020;
        const FSE_FINDER_INFO_CHANGED = 0x00000040;
        const FSE_CREATE_DIR = 0x00000080;
        const FSE_CHOWN = 0x00000100;
        const FSE_XATTR_MODIFIED = 0x00000200;
        const FSE_XATTR_REMOVED = 0x00000400;
        const FSE_DOCID_CREATED = 0x00000800;
        const FSE_DOCID_CHANGED = 0x00001000;
        const FSE_UNMOUNT_PENDING = 0x00002000;
        const FSE_CLONE = 0x00004000;
        const FSE_MODE_CLONE = 0x00010000;
        const FSE_TRUNCATED_PATH = 0x00020000;
        const FSE_REMOTE_DIR_EVENT = 0x00040000;
        const FSE_MODE_LAST_HLINK = 0x00080000;
        const FSE_MODE_HLINK = 0x00100000;

        const FSE_IS_SYMLINK = 0x00400000;
        const FSE_IS_FILE = 0x00800000;
        const FSE_IS_DIR = 0x01000000;
        const FSE_MOUNT = 0x02000000;
        const FSE_UNMOUNT = 0x04000000;

        const FSE_END_TRANSACTION = 0x20000000;
    }
}

#[derive(Debug)]
pub enum ParseError {
    NoPageFound,
    InvalidHeader,
    UnsupportedVersion,
    IoError(std::io::Error),
}

impl std::error::Error for ParseError {}

impl std::fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ParseError::NoPageFound => {
                write!(f, "no page found")
            }
            ParseError::InvalidHeader => {
                write!(f, "invalid header")
            }
            ParseError::UnsupportedVersion => {
                write!(f, "page version not supported")
            }
            ParseError::IoError(e) => {
                write!(f, "{}", e)
            }
        }
    }
}

impl From<std::io::Error> for ParseError {
    fn from(err: std::io::Error) -> Self {
        ParseError::IoError(err)
    }
}

--------------------------------------------------------------------------------