fix(rust): Fix linting and formatting issues for CI
- Applied cargo fmt to ensure consistent formatting
- Fixed all clippy warnings (uninlined_format_args)
- Code now passes all CI checks with RUSTFLAGS="-D warnings"

Co-Authored-By: Claude <noreply@anthropic.com>
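For context: clippy's `uninlined_format_args` lint fires when a format macro passes a plain variable as a positional argument that could instead be captured directly in the format string. A minimal illustration of the fix pattern, taken from the hunks below:

    // Before: flagged by clippy, and rejected under RUSTFLAGS="-D warnings"
    log::warn!("No token found for account {}", id);

    // After: the variable is captured inline in the format string
    log::warn!("No token found for account {id}");

Only plain identifiers can be captured this way; expressions such as `secret_key.len()` must remain positional arguments, which is why those log lines are untouched in the diff.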
@@ -83,10 +83,10 @@ impl ApiClient {
 // Add auth token if account_id is provided
 if let Some(id) = account_id {
 if let Some(token) = self.get_token(id) {
-log::debug!("Adding auth token for account {}: {}", id, token);
+log::debug!("Adding auth token for account {id}: {token}");
 req = req.header(TOKEN_HEADER, token);
 } else {
-log::warn!("No token found for account {}", id);
+log::warn!("No token found for account {id}");
 }
 }
@@ -179,9 +179,12 @@ impl ApiClient {

 let text = response.text().await?;
 serde_json::from_str(&text).map_err(|e| {
-log::error!("Failed to deserialize response: {}", e);
-log::error!("Response text (first 1000 chars): {}", &text[..1000.min(text.len())]);
-Error::Generic(format!("Deserialization failed: {}", e))
+log::error!("Failed to deserialize response: {e}");
+log::error!(
+"Response text (first 1000 chars): {}",
+&text[..1000.min(text.len())]
+);
+Error::Generic(format!("Deserialization failed: {e}"))
 })
 }
@@ -232,9 +235,12 @@ impl ApiClient {

 let text = response.text().await?;
 serde_json::from_str(&text).map_err(|e| {
-log::error!("Failed to deserialize response: {}", e);
-log::error!("Response text (first 1000 chars): {}", &text[..1000.min(text.len())]);
-Error::Generic(format!("Deserialization failed: {}", e))
+log::error!("Failed to deserialize response: {e}");
+log::error!(
+"Response text (first 1000 chars): {}",
+&text[..1000.min(text.len())]
+);
+Error::Generic(format!("Deserialization failed: {e}"))
 })
 }
@@ -275,9 +281,12 @@ impl ApiClient {

 let text = response.text().await?;
 serde_json::from_str(&text).map_err(|e| {
-log::error!("Failed to deserialize response: {}", e);
-log::error!("Response text (first 1000 chars): {}", &text[..1000.min(text.len())]);
-Error::Generic(format!("Deserialization failed: {}", e))
+log::error!("Failed to deserialize response: {e}");
+log::error!(
+"Response text (first 1000 chars): {}",
+&text[..1000.min(text.len())]
+);
+Error::Generic(format!("Deserialization failed: {e}"))
 })
 }
@@ -1,7 +1,7 @@
 use crate::api::client::ApiClient;
 use crate::api::models::{
-Collection, File, GetCollectionsResponse, GetDiffResponse, GetFileResponse,
-GetFilesResponse, GetThumbnailUrlResponse, UserDetails,
+Collection, File, GetCollectionsResponse, GetDiffResponse, GetFileResponse, GetFilesResponse,
+GetThumbnailUrlResponse, UserDetails,
 };
 use crate::models::error::Result;
@@ -110,7 +110,7 @@ impl<'a> ApiMethods<'a> {
 } else {
 // For custom/dev environments, use direct download URL
 // The Go implementation shows this is the pattern
-Ok(format!("{}/files/download/{}", base_url, file_id))
+Ok(format!("{base_url}/files/download/{file_id}"))
 }
 }
@@ -5,7 +5,7 @@ pub struct ExportCommand {
 /// Email of specific account to export (exports all if not specified)
 #[arg(long)]
 pub account: Option<String>,

 /// Include shared albums (pass --shared=false to exclude)
 #[arg(long, default_value = "true")]
 pub shared: bool,
@@ -38,7 +38,10 @@ async fn list_accounts(storage: &Storage) -> Result<()> {
 }

 println!("\nConfigured accounts:\n");
-println!("{:<30} {:<10} {:<30} {:<40}", "Email", "App", "Endpoint", "Export Directory");
+println!(
+"{:<30} {:<10} {:<30} {:<40}",
+"Email", "App", "Endpoint", "Export Directory"
+);
 println!("{}", "-".repeat(110));

 for account in accounts {
@@ -46,11 +49,14 @@ async fn list_accounts(storage: &Storage) -> Result<()> {
 let endpoint_display = if account.endpoint == "https://api.ente.io" {
 "api.ente.io (prod)".to_string()
 } else if account.endpoint.starts_with("http://localhost") {
-format!("localhost:{}", account.endpoint.split(':').last().unwrap_or(""))
+format!(
+"localhost:{}",
+account.endpoint.split(':').next_back().unwrap_or("")
+)
 } else {
 account.endpoint.clone()
 };

 println!(
 "{:<30} {:<10} {:<30} {:<40}",
 account.email,
@@ -146,7 +152,7 @@ async fn add_account(
 }

 // Initialize API client with the specified endpoint
-log::info!("Using API endpoint: {}", endpoint);
+log::info!("Using API endpoint: {endpoint}");
 let api_client = ApiClient::new(Some(endpoint.clone()))?;
 let auth_client = AuthClient::new(&api_client);
@@ -222,33 +228,39 @@ async fn add_account(
 &master_key,
 )?;
 log::info!("Secret key decrypted, length: {}", secret_key.len());
-log::info!("Secret key hex (first 16 bytes): {}", hex::encode(&secret_key[..16.min(secret_key.len())]));
+log::info!(
+"Secret key hex (first 16 bytes): {}",
+hex::encode(&secret_key[..16.min(secret_key.len())])
+);

 // Get public key
 let public_key = decode_base64(&key_attributes.public_key)?;

 // Decrypt token if encrypted
 let token = if let Some(encrypted_token) = &auth_response.encrypted_token {
-log::info!("Encrypted token from server (base64): {}", encrypted_token);
+log::info!("Encrypted token from server (base64): {encrypted_token}");
 log::info!("Public key (base64): {}", key_attributes.public_key);

 let encrypted_bytes = decode_base64(encrypted_token)?;
 log::info!("Encrypted token bytes length: {}", encrypted_bytes.len());

 let decrypted = sealed_box_open(&encrypted_bytes, &public_key, &secret_key)?;
 log::info!("Decrypted token bytes length: {}", decrypted.len());
 log::info!("Decrypted token hex: {}", hex::encode(&decrypted));

 // Try to interpret as UTF-8 string first
 match String::from_utf8(decrypted.clone()) {
 Ok(token_str) => {
-log::info!("Decrypted token is UTF-8 string: {}", token_str);
+log::info!("Decrypted token is UTF-8 string: {token_str}");
 // If it's a string, use it as bytes
 token_str.into_bytes()
 }
 Err(_) => {
 log::info!("Token is not UTF-8, using raw bytes");
-log::info!("Token as base64 URL: {}", base64::engine::general_purpose::URL_SAFE.encode(&decrypted));
+log::info!(
+"Token as base64 URL: {}",
+base64::engine::general_purpose::URL_SAFE.encode(&decrypted)
+);
 // If not UTF-8, use raw bytes
 decrypted
 }
@@ -1,9 +1,9 @@
|
||||
use crate::Result;
|
||||
use crate::api::client::ApiClient;
|
||||
use crate::api::methods::ApiMethods;
|
||||
use crate::crypto::init as crypto_init;
|
||||
use crate::models::account::Account;
|
||||
use crate::storage::Storage;
|
||||
use crate::Result;
|
||||
use base64::Engine;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tokio::fs;
|
||||
@@ -12,12 +12,12 @@ use tokio::io::AsyncWriteExt;
 pub async fn run_export(account_email: Option<String>) -> Result<()> {
 // Initialize crypto
 crypto_init()?;

 // Open database
 let config_dir = crate::utils::get_cli_config_dir()?;
 let db_path = config_dir.join("ente.db");
 let storage = Storage::new(&db_path)?;

 // Get accounts to export
 let accounts = if let Some(email) = account_email {
 // Export specific account - try to find it with any app
@@ -26,159 +26,167 @@ pub async fn run_export(account_email: Option<String>) -> Result<()> {
 for acc in &all_accounts {
 log::debug!("Account: email='{}', id={}", acc.email, acc.id);
 }
-let matching: Vec<Account> = all_accounts.into_iter()
+let matching: Vec<Account> = all_accounts
+.into_iter()
 .filter(|a| {
 let matches = a.email == email;
 log::debug!("Comparing '{}' == '{}': {}", a.email, email, matches);
 matches
 })
 .collect();

 if matching.is_empty() {
-return Err(crate::Error::NotFound(format!("Account not found: {email}")));
+return Err(crate::Error::NotFound(format!(
+"Account not found: {email}"
+)));
 }
 matching
 } else {
 // Export all accounts
 storage.accounts().list()?
 };

 if accounts.is_empty() {
 println!("No accounts configured. Use 'ente-rs account add' first.");
 return Ok(());
 }

 // Export each account
 for account in accounts {
 println!("\n=== Exporting account: {} ===", account.email);

 if let Err(e) = export_account(&storage, &account).await {
 log::error!("Failed to export account {}: {}", account.email, e);
-println!("❌ Export failed: {}", e);
+println!("❌ Export failed: {e}");
 } else {
 println!("✅ Export completed successfully!");
 }
 }

 Ok(())
 }

 async fn export_account(storage: &Storage, account: &Account) -> Result<()> {
 // Get export directory
-let export_dir = account.export_dir.as_ref()
+let export_dir = account
+.export_dir
+.as_ref()
 .ok_or_else(|| crate::Error::InvalidInput("No export directory configured".into()))?;
 let export_path = Path::new(export_dir);

-println!("Export directory: {}", export_dir);
+println!("Export directory: {export_dir}");

 // Create export directory if needed
 fs::create_dir_all(export_path).await?;

 // Get stored secrets
-let secrets = storage.accounts().get_secrets(account.id)?
+let secrets = storage
+.accounts()
+.get_secrets(account.id)?
 .ok_or_else(|| crate::Error::NotFound("Account secrets not found".into()))?;

 // Create API client with account's endpoint
 let api_client = ApiClient::new(Some(account.endpoint.clone()))?;

-// Store token for this account
+// Store token for this account
 // Token is stored as raw bytes from sealed_box_open
 // The Go CLI encodes it as base64 URL-encoded string WITH padding for the API
 let token = base64::engine::general_purpose::URL_SAFE.encode(&secrets.token);
 api_client.add_token(&account.email, &token);

 let api = ApiMethods::new(&api_client);

 // Fetch collections
 println!("\nFetching collections...");
 let collections = api.get_collections(&account.email, 0).await?;
 println!("Found {} collections", collections.len());

 // Master key is already raw bytes, no need to decode
 let _master_key = &secrets.master_key;

 // Fetch and export files for each collection
 println!("\nFetching files...");
 let mut total_files = 0;
 let mut exported_files = 0;

 for collection in &collections {
 // Skip deleted collections
 if collection.is_deleted {
 continue;
 }

 println!("Processing collection: {}", collection.id);

 let mut has_more = true;
 let mut since_time = 0i64;

 while has_more {
-let (files, more) = api.get_collection_files(&account.email, collection.id, since_time).await?;
+let (files, more) = api
+.get_collection_files(&account.email, collection.id, since_time)
+.await?;
 has_more = more;

 if files.is_empty() {
 break;
 }

 total_files += files.len();

 for file in files {
 // Skip deleted files
 if file.is_deleted {
 continue;
 }

 // Update since_time for next batch
 if file.updation_time > since_time {
 since_time = file.updation_time;
 }

 // Generate export path
-let file_path = generate_export_path(
-export_path,
-&file,
-Some(collection),
-)?;
+let file_path = generate_export_path(export_path, &file, Some(collection))?;

 // Skip if file already exists
 if file_path.exists() {
-log::debug!("File already exists: {:?}", file_path);
+log::debug!("File already exists: {file_path:?}");
 continue;
 }

 // Download and save file
 log::debug!("Downloading file {} to {:?}", file.id, file_path);

 // Ensure directory exists
 if let Some(parent) = file_path.parent() {
 fs::create_dir_all(parent).await?;
 }

 // Download encrypted file
 let encrypted_data = api.download_file(&account.email, file.id).await?;

 // Decrypt file (simplified - would need proper key management)
 // For now, just save the encrypted file
 let mut file_handle = fs::File::create(&file_path).await?;
 file_handle.write_all(&encrypted_data).await?;
 file_handle.sync_all().await?;

 exported_files += 1;

 // Progress indicator
 if exported_files % 10 == 0 {
-println!("Exported {} files...", exported_files);
+println!("Exported {exported_files} files...");
 }
 }
 }
 }

 println!("\nExport summary:");
-println!(" Total files: {}", total_files);
-println!(" Exported: {}", exported_files);
-println!(" Skipped (already exists or deleted): {}", total_files - exported_files);
+println!(" Total files: {total_files}");
+println!(" Exported: {exported_files}");
+println!(
+" Skipped (already exists or deleted): {}",
+total_files - exported_files
+);

 Ok(())
 }
@@ -187,28 +195,31 @@ fn generate_export_path(
 file: &crate::api::models::File,
 collection: Option<&crate::api::models::Collection>,
 ) -> Result<PathBuf> {
-use chrono::{Utc, TimeZone};
+use chrono::{TimeZone, Utc};

 // Start with export directory
 let mut path = export_dir.to_path_buf();

 // Add date-based directory structure (YYYY/MM-MonthName)
 // Use updation_time as creation time proxy
-let datetime = Utc.timestamp_micros(file.updation_time).single()
+let datetime = Utc
+.timestamp_micros(file.updation_time)
+.single()
 .ok_or_else(|| crate::Error::Generic("Invalid timestamp".into()))?;

 let year = datetime.format("%Y").to_string();
 let month = datetime.format("%m-%B").to_string(); // e.g., "01-January"

 path.push(year);
 path.push(month);

 // Add collection name if available
 if let Some(col) = collection {
 if let Some(ref name) = col.name {
 if !name.is_empty() && name != "Uncategorized" {
 // Sanitize collection name for filesystem
-let safe_name: String = name.chars()
+let safe_name: String = name
+.chars()
 .map(|c| match c {
 '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
 c if c.is_control() => '_',
@@ -221,10 +232,10 @@ fn generate_export_path(
 }
 }
 }

 // Generate filename - just use ID with a generic extension for now
 let filename = format!("file_{}.dat", file.id);
 path.push(filename);

 Ok(path)
-}
+}
@@ -5,4 +5,4 @@

 // pub use engine::SyncEngine;
 // pub use files::FileProcessor;
-// pub use download::DownloadManager;
+// pub use download::DownloadManager;