// twitter-monitor/src/twitter/curl.rs
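//! Parses a `curl` command (as copied from a browser's "Copy as cURL") into
//! the URL, headers, cookies, and request body needed to replay the request.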
use anyhow::{anyhow, Result};
use regex::Regex;
use reqwest::header;
use serde_json;
use urlencoding;

/// Authentication details extracted from a copied `curl` command.
#[derive(Debug, Clone)]
pub struct CurlAuth {
    pub url: String,
    pub headers: header::HeaderMap,
    pub data: Option<String>,
    pub url_parameters: Option<serde_json::Value>,
}

impl CurlAuth {
    pub fn from_curl(curl_command: &str) -> Result<Self> {
        println!("Parsing curl command...");

        // Extract the URL using regex
        let url_regex = Regex::new(r"curl\s+'([^']+)'")?;
        let url = match url_regex.captures(curl_command) {
            Some(captures) => match captures.get(1) {
                Some(url_match) => url_match.as_str().to_string(),
                None => return Err(anyhow!("Could not extract URL from regex match")),
            },
            None => return Err(anyhow!("Could not find URL in curl command: {}", curl_command)),
        };
        println!("URL: {}", url);

        // Create a new HeaderMap
        let mut headers = header::HeaderMap::new();

        // Extract headers using regex
        let header_regex = Regex::new(r"-H\s+'([^']+)'")?;
        println!("Extracting headers...");
        for cap in header_regex.captures_iter(curl_command) {
            if let Some(header) = cap.get(1) {
                let header_str = header.as_str();
                if let Some((key, value)) = header_str.split_once(':') {
                    let key = key.trim();
                    let value = value.trim();
                    println!("Header: {} = {}", key, value);
                    // Parse the header name and value
                    if let Ok(header_name) = key.parse::<header::HeaderName>() {
                        if let Ok(header_value) = value.parse::<header::HeaderValue>() {
                            headers.insert(header_name, header_value);
                        } else {
                            println!("Warning: Could not parse header value: {}", value);
                        }
                    } else {
                        println!("Warning: Could not parse header name: {}", key);
                    }
                }
            }
        }

        // Extract cookies using regex and add them to the headers
        let cookie_regex = Regex::new(r"-b\s+'([^']+)'")?;
        if let Some(captures) = cookie_regex.captures(curl_command) {
            if let Some(cookies_str) = captures.get(1) {
                println!("Found cookies: {}", cookies_str.as_str());
                if let Ok(cookie_value) = cookies_str.as_str().parse::<header::HeaderValue>() {
                    headers.insert(header::COOKIE, cookie_value);
                } else {
                    println!("Warning: Could not parse cookie value");
                }
            }
        }

        // Extract data using regex - try different formats
        let mut data = None;

        // First try --data-raw
        let data_raw_regex = Regex::new(r"--data-raw\s+'([^']+)'")?;
        if let Some(captures) = data_raw_regex.captures(curl_command) {
            if let Some(data_match) = captures.get(1) {
                data = Some(data_match.as_str().to_string());
                println!("Found data-raw: {}", data.as_ref().unwrap());
            }
        }

        // If not found, try --data
        if data.is_none() {
            let data_regex = Regex::new(r"--data\s+'([^']+)'")?;
            if let Some(captures) = data_regex.captures(curl_command) {
                if let Some(data_match) = captures.get(1) {
                    data = Some(data_match.as_str().to_string());
                    println!("Found data: {}", data.as_ref().unwrap());
                }
            }
        }

        // If not found, try -d
        if data.is_none() {
            let d_regex = Regex::new(r"-d\s+'([^']+)'")?;
            if let Some(captures) = d_regex.captures(curl_command) {
                if let Some(data_match) = captures.get(1) {
                    data = Some(data_match.as_str().to_string());
                    println!("Found -d: {}", data.as_ref().unwrap());
                }
            }
        }

        // Extract URL parameters if present
        let url_parameters = if url.contains('?') {
            extract_url_parameters(&url)
        } else {
            None
        };

        // If we have URL parameters but no data, create data from parameters
        if data.is_none() && url_parameters.is_some() {
            let params = url_parameters.as_ref().unwrap();
            println!("Creating data from URL parameters");
            data = Some(serde_json::to_string(params).unwrap_or_default());
            println!("Created data: {}", data.as_ref().unwrap());
        }

        // Check for essential auth headers/cookies
        if !headers.contains_key(header::COOKIE) {
            println!("Warning: Missing cookie header");
        }

        println!("CurlAuth created with URL and {} headers", headers.len());
        println!("=== URL ===");
        println!("{}", url);
        println!("=== Headers ===");
        for (name, value) in headers.iter() {
            println!(" {}: {}", name, value.to_str().unwrap_or("[binary value]"));
        }
        println!("=== Data ===");
        if let Some(data_str) = &data {
            println!("{}", data_str);
        } else {
            println!(" [No data]");
        }
        println!("============");

        Ok(Self {
            url,
            headers,
            data,
            url_parameters,
        })
    }
}
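
// A caller-side sketch, not part of the original file: assuming the crate's
// reqwest dependency has the `blocking` feature enabled, a parsed CurlAuth
// could be replayed roughly like this. The function name `send_with_auth`
// and the GET/POST choice based on the presence of a body are illustrative
// assumptions, not the crate's actual request code.
#[allow(dead_code)]
fn send_with_auth(auth: &CurlAuth) -> Result<String> {
    let client = reqwest::blocking::Client::new();

    // POST when the curl command carried a body, otherwise GET.
    let mut request = if auth.data.is_some() {
        client.post(auth.url.as_str())
    } else {
        client.get(auth.url.as_str())
    };

    // Re-attach every header (including the Cookie header) captured from curl.
    request = request.headers(auth.headers.clone());
    if let Some(body) = &auth.data {
        request = request.body(body.clone());
    }

    let response = request.send()?;
    Ok(response.text()?)
}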

fn extract_url_parameters(url: &str) -> Option<serde_json::Value> {
    if let Some(query_part) = url.split('?').nth(1) {
        let mut result = serde_json::Map::new();
        // Process each query parameter
        for param in query_part.split('&') {
            let parts: Vec<&str> = param.split('=').collect();
            if parts.len() == 2 {
                let key = parts[0];
                let value = parts[1];
                if key == "variables" || key == "features" {
                    if let Ok(decoded) = urlencoding::decode(value) {
                        if let Ok(json) = serde_json::from_str::<serde_json::Value>(&decoded) {
                            result.insert(key.to_string(), json);
                        }
                    }
                }
            }
        }
        if !result.is_empty() {
            return Some(serde_json::Value::Object(result));
        }
    }
    None
}
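
// Illustrative test sketch: the URL below is a made-up example of the kind of
// query string this parser targets (`variables` carrying URL-encoded JSON);
// it is not a real endpoint or a captured request.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn extracts_json_query_parameters() {
        let url = "https://example.com/graphql/UserTweets?variables=%7B%22userId%22%3A%2242%22%7D&count=20";
        let params = extract_url_parameters(url).expect("expected `variables` to be extracted");
        // The `variables` value is decoded and parsed into JSON; plain
        // parameters like `count` are ignored.
        assert_eq!(params["variables"]["userId"], "42");
        assert!(params.get("count").is_none());
    }
}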