# Osa: Parallel Processing

High-performance concurrent data processing.

Use Case 1: Batch Image Resizing

// batch_resize.ifa - Parallel image processing

// Resize a single image to the given dimensions and save it under thumbnails/.
// path:          source image file path (e.g. "./photos/cat.jpg")
// width, height: target pixel dimensions
// Returns the output path of the saved thumbnail.
ise resize_image(path, width, height) {
    ayanmo img = Fidio.load_image(path);
    ayanmo resized = Fidio.resize(img, width, height);
    
    // Derive the bare filename: split the path on "/" and take the last segment.
    ayanmo filename = Ika.split(path, "/");
    ayanmo name = Ogunda.last(filename);
    ayanmo output = "thumbnails/" + name;
    
    Fidio.save_image(resized, output);
    padap? output;
}

// Resize every .jpg/.png file in input_dir concurrently via Osa.map.
// input_dir:     directory to scan (non-recursive; uses Odi.list)
// width, height: target pixel dimensions passed through to resize_image
// Prints progress and elapsed wall time; returns nothing.
ise batch_resize(input_dir, width, height) {
    ayanmo files = Odi.list(input_dir);
    ayanmo images = [];
    
    // Keep only image files, rebuilt as full paths.
    // NOTE(review): this check looks case-sensitive — ".JPG"/".PNG" files
    // would be skipped; confirm whether that is intended.
    fun file ninu files {
        ti (Ika.ends_with(file, ".jpg") || Ika.ends_with(file, ".png")) {
            ayanmo images = Ogunda.push(images, input_dir + "/" + file);
        }
    }
    
    Irosu.fo("Processing " + Ogunda.len(images) + " images in parallel...");
    
    // Fan the resize work out across workers; Osa.map blocks until all finish.
    ayanmo start = Iwori.now_ms();
    ayanmo results = Osa.map(images, |path| {
        resize_image(path, width, height)
    });
    ayanmo elapsed = Iwori.now_ms() - start;
    
    Irosu.fo("? Completed in " + elapsed + "ms");
    Irosu.fo("Processed " + Ogunda.len(results) + " images");
}

// Entry point: ensure the output directory exists, then resize every image
// in ./photos to 200x200 thumbnails.
Odi.mkdir("thumbnails");
batch_resize("./photos", 200, 200);

Use Case 2: Log Analysis Pipeline

// log_analysis.ifa - Parallel log file analysis

// Parse one log line of the form: [timestamp] level message...
// Returns a map with "timestamp", "level", and "message" keys, or nil when
// the line has fewer than 3 space-separated parts (malformed/blank line).
// Everything after the second token is rejoined into the message, so
// messages containing spaces are preserved.
ise parse_log_line(line) {
    // Format: [timestamp] level message
    ayanmo parts = Ika.split(line, " ");
    ti (Ogunda.len(parts) < 3) {
        padap? nil;
    }
    
    padap? {
        "timestamp": Ogunda.get(parts, 0),
        "level": Ogunda.get(parts, 1),
        "message": Ika.join(Ogunda.slice(parts, 2, Ogunda.len(parts)), " ")
    };
}

// Analyze a list of log files concurrently.
// log_files: list of file paths readable via Odi.read
// Returns a summary map: total entry count, per-level counts (ERROR/WARN/INFO),
// and the list of ERROR message strings.
ise analyze_logs(log_files) {
    // Read each file in parallel and flatten all files into one line list.
    ayanmo all_lines = Osa.flat_map(log_files, |file| {
        ayanmo content = Odi.read(file);
        padap? Ika.split(content, "\n");
    });
    
    // Parse in parallel; filter_map drops the nil results from malformed lines.
    ayanmo entries = Osa.filter_map(all_lines, parse_log_line);
    
    // Count entries per severity level in parallel.
    ayanmo error_count = Osa.count(entries, |e| e["level"] == "ERROR");
    ayanmo warn_count = Osa.count(entries, |e| e["level"] == "WARN");
    ayanmo info_count = Osa.count(entries, |e| e["level"] == "INFO");
    
    // Keep full ERROR entries so their messages can be extracted below.
    ayanmo errors = Osa.filter(entries, |e| e["level"] == "ERROR");
    
    padap? {
        "total": Ogunda.len(entries),
        "errors": error_count,
        "warnings": warn_count,
        "info": info_count,
        "error_messages": Osa.map(errors, |e| e["message"])
    };
}

// Entry point: analyze three log files and print the summary counts.
ayanmo files = ["app.log", "server.log", "worker.log"];
ayanmo stats = analyze_logs(files);

Irosu.fo("?? Log Analysis Results");
Irosu.fo("Total entries: " + stats["total"]);
Irosu.fo("Errors: " + stats["errors"]);
Irosu.fo("Warnings: " + stats["warnings"]);
Irosu.fo("Info: " + stats["info"]);

Use Case 3: Data Aggregation

// aggregate.ifa - Parallel data aggregation

// Sample dataset: one record per sale, keyed by region, product, and amount.
ayanmo sales_data = [
    {"region": "North", "product": "A", "amount": 1500},
    {"region": "South", "product": "B", "amount": 2300},
    {"region": "North", "product": "B", "amount": 1800},
    {"region": "East", "product": "A", "amount": 950},
    {"region": "West", "product": "C", "amount": 3200},
    {"region": "North", "product": "C", "amount": 2100},
    {"region": "South", "product": "A", "amount": 1750},
    {"region": "East", "product": "B", "amount": 1400}
];

// Project the "amount" column in parallel, then reduce to the headline stats.
ayanmo total = Osa.sum(Osa.map(sales_data, |s| s["amount"]));
ayanmo avg = total / Ogunda.len(sales_data);
ayanmo max_sale = Osa.max(Osa.map(sales_data, |s| s["amount"]));
ayanmo min_sale = Osa.min(Osa.map(sales_data, |s| s["amount"]));

// Sum sale amounts per region with a sequential fold over the list.
// (NOTE(review): the original comment said "parallel reduce", but this loop
// runs sequentially — no Osa.* call is involved.)
// data: list of sale maps, each with "region" and "amount" keys.
// Returns a map of region name -> total amount.
ise group_by_region(data) {
    ayanmo regions = {};
    
    fun sale ninu data {
        ayanmo region = sale["region"];
        // First sale seen for this region: initialize its running total.
        ti (regions[region] == nil) {
            regions[region] = 0;
        }
        regions[region] = regions[region] + sale["amount"];
    }
    
    padap? regions;
}

ayanmo by_region = group_by_region(sales_data);

// Print the headline stats and the per-region totals.
Irosu.fo("?? Sales Summary");
Irosu.fo("----------------");
Irosu.fo("Total: $" + total);
Irosu.fo("Average: $" + avg);
Irosu.fo("Max: $" + max_sale);
Irosu.fo("Min: $" + min_sale);
Irosu.fo("\nBy Region:");
// NOTE(review): `fun region key(by_region)` does not match the `fun x ninu xs`
// iteration form used everywhere else in this file — likely a typo for a
// key-iteration construct (e.g. `fun region ninu <keys of by_region>`); verify
// against the language's map-iteration syntax.
fun region key(by_region) {
    Irosu.fo("  " + region + ": $" + by_region[region]);
}

Use Case 4: Parallel Web Scraping

// scraper.ifa - Fetch multiple URLs concurrently

// Fetch a single URL via HTTP GET and time the request.
// url: the address to fetch with Otura.get
// Returns a map with the url, response status, body size in bytes
// (via Ika.len), and elapsed time in milliseconds.
ise fetch_page(url) {
    ayanmo start = Iwori.now_ms();
    ayanmo response = Otura.get(url);
    ayanmo elapsed = Iwori.now_ms() - start;
    
    padap? {
        "url": url,
        "status": response["status"],
        "size": Ika.len(response["body"]),
        "time_ms": elapsed
    };
}

// Fetch all urls concurrently and print aggregate statistics.
// urls: list of URL strings handed to fetch_page via Osa.map
// Returns the full list of per-URL result maps from fetch_page.
ise scrape_urls(urls) {
    Irosu.fo("Fetching " + Ogunda.len(urls) + " URLs in parallel...");
    
    // Total wall time covers the whole parallel batch, not the sum of requests.
    ayanmo start = Iwori.now_ms();
    ayanmo results = Osa.map(urls, fetch_page);
    ayanmo total_time = Iwori.now_ms() - start;
    
    // Calculate stats.
    // NOTE(review): avg_time divides by Ogunda.len(results) — behavior on an
    // empty urls list (division by zero) is unguarded; confirm callers never
    // pass an empty list or add a guard.
    ayanmo successful = Osa.filter(results, |r| r["status"] == 200);
    ayanmo total_size = Osa.sum(Osa.map(results, |r| r["size"]));
    ayanmo avg_time = Osa.sum(Osa.map(results, |r| r["time_ms"])) / Ogunda.len(results);
    
    Irosu.fo("\n?? Scraping Results");
    Irosu.fo("-------------------");
    Irosu.fo("Total time: " + total_time + "ms");
    Irosu.fo("Successful: " + Ogunda.len(successful) + "/" + Ogunda.len(urls));
    Irosu.fo("Total data: " + (total_size / 1024) + " KB");
    Irosu.fo("Avg response: " + avg_time + "ms");
    
    padap? results;
}

// Entry point: scrape five API endpoints concurrently and print the stats.
ayanmo urls = [
    "https://api.example.com/users",
    "https://api.example.com/products",
    "https://api.example.com/orders",
    "https://api.example.com/inventory",
    "https://api.example.com/reports"
];

scrape_urls(urls);