@@ -74,14 +74,7 @@ impl ProcessError {
7474
7575// Structs
7676
77- #[ derive( Debug , Deserialize , Serialize ) ]
78- #[ serde( default ) ]
79- pub struct MainConfig {
80- pub crawler : CrawlerConfig ,
81- pub scanner : ScannerConfig ,
82- pub network : NetworkConfig ,
83- }
84-
77+ // database.toml
8578#[ derive( Debug , Deserialize , Serialize ) ]
8679#[ serde( default ) ]
8780pub struct DatabaseConfig {
@@ -92,25 +85,35 @@ pub struct DatabaseConfig {
9285 pub database : String ,
9386}
9487
// main.toml
/// Top-level configuration deserialized from `main.toml`.
///
/// `#[serde(default)]` means missing tables fall back to `MainConfig::default()`.
#[derive(Debug, Deserialize, Serialize)]
#[serde(default)]
pub struct MainConfig {
    /// `[crawler]` table — crawl-iteration settings.
    pub crawler: CrawlerConfig,
    /// `[scanner]` table — scanner task settings.
    pub scanner: ScannerConfig,
    /// `[general]` table — shared network limits, timeouts and default ports.
    pub general: GeneralConfig,
}
96+
/// `[crawler]` section of the main config.
#[derive(Debug, Deserialize, Serialize)]
pub struct CrawlerConfig {
    /// IPs processed per crawl iteration; `validate()` rejects values below 1000.
    pub ips_per_iteration: u32,
    /// Crawler task limit; 0 means "use `general.max_network_tasks`" (see `get_crawler_tasks`).
    pub max_tasks: u32,
    /// Pause between iterations — unit not visible here; presumably ms, TODO confirm.
    pub time_between_iteration: u64,
}
103103
/// `[scanner]` section of the main config.
#[derive(Debug, Deserialize, Serialize)]
pub struct ScannerConfig {
    /// Scanner task limit; 0 means "use `general.max_network_tasks`" (see `get_scanner_tasks`).
    pub max_tasks: u32,
}
109108
/// `[general]` section of the main config: shared network limits and timeouts.
#[derive(Debug, Deserialize, Serialize)]
pub struct GeneralConfig {
    /// Global cap on concurrent network tasks; `validate()` enforces 10..=20000.
    pub max_network_tasks: u32,
    /// Ping timeout; `validate()` enforces 80..=15000 (error text says "80ms and 15s",
    /// so milliseconds — confirm against the code that consumes it).
    pub ping_timeout: u64,
    /// Query timeout; same 80..=15000 range and presumed ms unit as above.
    pub query_timeout: u64,
    /// Join timeout; same 80..=15000 range and presumed ms unit as above.
    pub join_timeout: u64,
    /// Whether to fetch UUIDs — exact semantics not visible in this chunk; verify at call site.
    pub do_uuid_fetch: bool,
    /// Ports used when none are specified; must be non-empty and contain no port 0.
    pub default_ports: Vec<u16>, // TODO
}
115118
116119// Process code
@@ -134,43 +137,44 @@ impl MainConfig {
134137 pub fn validate ( & self ) -> Vec < ConfigError > {
135138 let mut errors = Vec :: new ( ) ;
136139
137- // [network ]
138- let net = & self . network ;
139- if net . max_tasks < 10 {
140- errors. push ( InvalidValue ( "network .max_tasks" . into ( ) , "Must be at least 10 for performance." . into ( ) ) ) ;
141- } else if net . max_tasks > 20000 {
142- errors. push ( InvalidValue ( "network .max_tasks" . into ( ) , "Above 20000 might crash your network stack." . into ( ) ) ) ;
140+ // [general ]
141+ let general = & self . general ;
142+ if general . max_network_tasks < 10 {
143+ errors. push ( InvalidValue ( "general .max_tasks" . into ( ) , "Must be at least 10 for performance." . into ( ) ) ) ;
144+ } else if general . max_network_tasks > 20000 {
145+ errors. push ( InvalidValue ( "general .max_tasks" . into ( ) , "Above 20000 might crash your network stack." . into ( ) ) ) ;
143146 }
144147
145- if !( 80 ..=15000 ) . contains ( & net. timeout ) {
146- errors. push ( InvalidValue ( "network.timeout" . into ( ) , "Keep it between 80ms and 15s." . into ( ) ) ) ;
148+ if !( 80 ..=15000 ) . contains ( & general. ping_timeout ) {
149+ errors. push ( InvalidValue ( "general.ping_timeout" . into ( ) , "Keep it between 80ms and 15s." . into ( ) ) ) ;
150+ }
151+ if !( 80 ..=15000 ) . contains ( & general. query_timeout ) {
152+ errors. push ( InvalidValue ( "general.query_timeout" . into ( ) , "Keep it between 80ms and 15s." . into ( ) ) ) ;
153+ }
154+ if !( 80 ..=15000 ) . contains ( & general. join_timeout ) {
155+ errors. push ( InvalidValue ( "general.join_timeout" . into ( ) , "Keep it between 80ms and 15s." . into ( ) ) ) ;
156+ }
157+
158+ if general. default_ports . is_empty ( ) {
159+ errors. push ( MissingRequired ( "general.default_ports" . into ( ) ) ) ;
160+ } else if general. default_ports . iter ( ) . any ( |& p| p == 0 ) {
161+ errors. push ( InvalidValue ( "general.default_ports" . into ( ) , "Port 0 is not allowed." . into ( ) ) ) ;
147162 }
148163
149164 // [crawler]
150165 let crawl = & self . crawler ;
151166 if crawl. ips_per_iteration < 1000 {
152167 errors. push ( InvalidValue ( "crawler.ips_per_iteration" . into ( ) , "Too low. At least 1000 required." . into ( ) ) ) ;
153168 }
154-
155- if crawl. default_ports . is_empty ( ) {
156- errors. push ( MissingRequired ( "crawler.default_ports" . into ( ) ) ) ;
157- } else if crawl. default_ports . iter ( ) . any ( |& p| p == 0 ) {
158- errors. push ( InvalidValue ( "crawler.default_ports" . into ( ) , "Port 0 is not allowed." . into ( ) ) ) ;
159- }
160-
161- // [scanning]
162- if self . scanner . default_ports . is_empty ( ) {
163- errors. push ( MissingRequired ( "scanning.default_ports" . into ( ) ) ) ;
164- }
165169 errors
166170 }
167171
168172 pub fn get_crawler_tasks ( & self ) -> u32 {
169- if self . crawler . max_tasks == 0 { self . network . max_tasks } else { self . crawler . max_tasks }
173+ if self . crawler . max_tasks == 0 { self . general . max_network_tasks } else { self . crawler . max_tasks }
170174 }
171175
172176 pub fn get_scanner_tasks ( & self ) -> u32 {
173- if self . scanner . max_tasks == 0 { self . network . max_tasks } else { self . scanner . max_tasks }
177+ if self . scanner . max_tasks == 0 { self . general . max_network_tasks } else { self . scanner . max_tasks }
174178 }
175179}
176180
@@ -247,7 +251,7 @@ impl DatabaseConfig {
247251
248252pub fn init ( root_path : Option < String > ) -> Result < ( ) , ProcessError > {
249253 let header = "# ServerRawler configuration file\n \
250- # Github: https://github.com/Cyberdolfi/ServerRawler\n ";
254+ # Github: https://github.com/Cyberdolfi/ServerRawler";
251255 let dir = root_path. map ( PathBuf :: from) . unwrap_or_else ( || PathBuf :: from ( "." ) . join ( "config" ) ) ;
252256
253257 if !dir. exists ( ) {
@@ -257,12 +261,13 @@ pub fn init(root_path: Option<String>) -> Result<(), ProcessError> {
257261 let config_file = dir. join ( "config.toml" ) ;
258262 let db_file = dir. join ( "database.toml" ) ;
259263
264+ // Generating configurations
260265 if !config_file. exists ( ) {
261266 let default_config = MainConfig :: default ( ) ;
262267 let toml_content = toml:: to_string_pretty ( & default_config) . unwrap ( ) ;
263268
264269 let doc_link = "# Read the docs here: https://cyberdolfi.github.io/ServerRawler/docs/configuration/config" ;
265- let final_content = format ! ( "{}{} {}" , header, doc_link, toml_content) ;
270+ let final_content = format ! ( "{}\n {} \n \n {}" , header, doc_link, toml_content) ;
266271 fs:: write ( config_file, final_content) . map_err ( Io ) ?;
267272 }
268273
@@ -271,7 +276,7 @@ pub fn init(root_path: Option<String>) -> Result<(), ProcessError> {
271276 let toml_content = toml:: to_string_pretty ( & default_db) . unwrap ( ) ;
272277
273278 let doc_link = "# Read the docs here: https://cyberdolfi.github.io/ServerRawler/docs/configuration/database" ;
274- let final_content = format ! ( "{}{} {}" , header, doc_link, toml_content) ;
279+ let final_content = format ! ( "{}\n {} \n \n {}" , header, doc_link, toml_content) ;
275280 fs:: write ( db_file, final_content) . map_err ( Io ) ?;
276281 }
277282
@@ -286,17 +291,18 @@ impl Default for MainConfig {
286291 crawler : CrawlerConfig {
287292 ips_per_iteration : 1000000 ,
288293 max_tasks : 0 ,
289- runs : 0 ,
290- time_between_runs : 0 ,
291- default_ports : vec ! [ 25565 ] ,
294+ time_between_iteration : 0 ,
292295 } ,
293296 scanner : ScannerConfig {
294297 max_tasks : 0 ,
295- default_ports : vec ! [ 25565 ] ,
296298 } ,
297- network : NetworkConfig {
298- max_tasks : 2000 ,
299- timeout : 3000 ,
299+ general : GeneralConfig {
300+ max_network_tasks : 2000 ,
301+ ping_timeout : 3000 ,
302+ query_timeout : 3000 ,
303+ join_timeout : 3000 ,
304+ do_uuid_fetch : true ,
305+ default_ports : vec ! [ 25565 ] ,
300306 } ,
301307 }
302308 }
@@ -330,7 +336,7 @@ mod tests {
330336 #[ test]
331337 fn test_invalid_network_tasks ( ) {
332338 let mut config = MainConfig :: default ( ) ;
333- config. network . max_tasks = 5 ;
339+ config. general . max_network_tasks = 5 ;
334340 let errors = config. validate ( ) ;
335341 assert ! ( !errors. is_empty( ) ) ;
336342 }
0 commit comments