Press n or j to go to the next uncovered block, b, p or k for the previous block.
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 | 4x 4x 4x 4x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 2x 1x 1x 1x 1x 1x 3x 3x 1x 1x 3x 1x 1x 1x 1x 1x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 3x 9x 9x 3x 6x 6x 10x 10x 24x 9x 9x 24x 9x | /**
* @module Shared/DataLoader
* @description Unified data fetching with fallback, caching, and retry logic.
* Replaces 6+ independent data loading implementations across dashboards.
*
* Features:
* - Local-first with remote fallback
* - localStorage caching with TTL
 * - Retry with linearly increasing backoff (delay grows with attempt number)
* - CSV parsing via PapaParse (CSP-compatible) with simple fallback
* - JSON and text response handling
*
* @intelligence Resilient OSINT data acquisition pipeline — multi-source intelligence data loading with local-first strategy, remote fallback, localStorage caching (TTL-based), retry with exponential backoff, and CSV/JSON parsing. Ensures continuous intelligence availability even during source outages.
*
* @business Platform reliability foundation — data loading resilience directly impacts user experience KPIs (page load time, error rate, Time to Interactive). Caching reduces infrastructure costs and enables offline-capable future PWA offering.
*
* @marketing Performance marketing enabler — fast, reliable data loading supports Core Web Vitals targets (LCP < 2.5s, FID < 100ms) critical for SEO ranking and user retention. Reliability metrics are a key selling point for B2G/enterprise prospects.
 */
import { logger } from './logger.js';
import type { CSVRow, DataSource, LoadOptions } from './types.js';
// Cached entries older than this are discarded on read (milliseconds).
const DEFAULT_CACHE_TTL = 7 * 24 * 60 * 60 * 1000; // 7 days
// Total fetch attempts per URL (not additional retries after the first).
const DEFAULT_RETRIES = 3;
// Base delay in ms between attempts; multiplied by the attempt number.
const DEFAULT_RETRY_BACKOFF = 2000;
/** Prefix used for all cache keys written by this module. */
export const CACHE_KEY_PREFIX = 'rdm_dl_';
/** Shape of one localStorage cache record, serialized as JSON. */
interface CacheEntry {
  data: string; // raw response text as fetched
  timestamp: number; // epoch ms at write time; compared against the TTL on read
}
/**
* Fetch data from a URL with retry logic.
*/
async function fetchWithRetry(
url: string,
retries: number,
backoff: number,
): Promise<Response> {
for (let attempt = 1; attempt <= retries; attempt++) {
try {
const response = await fetch(url);
Eif (response.ok) return response;
logger.warn(`Fetch attempt ${attempt}/${retries} failed for ${url}: ${response.status}`);
} catch (error) {
logger.warn(`Fetch attempt ${attempt}/${retries} error for ${url}:`, error);
}
if (attempt < retries) {
await new Promise((resolve) => setTimeout(resolve, backoff * attempt));
}
}
throw new Error(`Failed to fetch ${url} after ${retries} attempts`);
}
/**
* Get data from localStorage cache if valid.
*/
function getFromCache(key: string, ttl: number): string | null {
try {
const prefixedKey = CACHE_KEY_PREFIX + key;
const raw = localStorage.getItem(prefixedKey);
Eif (!raw) return null;
const entry: CacheEntry = JSON.parse(raw);
if (Date.now() - entry.timestamp > ttl) {
localStorage.removeItem(prefixedKey);
return null;
}
return entry.data;
} catch {
return null;
}
}
/**
* Store data in localStorage cache.
*/
/**
 * Store data in localStorage cache under this module's key namespace.
 *
 * On `QuotaExceededError`, the oldest half of this module's own entries
 * (ranked by stored timestamp) is evicted and the write is retried once.
 * Caching is best-effort: every failure is logged, never thrown.
 *
 * @param key - Cache key (without the module prefix).
 * @param data - Raw text to cache alongside a write timestamp.
 */
function setCache(key: string, data: string): void {
  const storageKey = CACHE_KEY_PREFIX + key;
  const entry: CacheEntry = { data, timestamp: Date.now() };
  const serialized = JSON.stringify(entry);
  try {
    localStorage.setItem(storageKey, serialized);
  } catch (err: unknown) {
    const quotaExceeded = err instanceof DOMException && err.name === 'QuotaExceededError';
    if (!quotaExceeded) {
      logger.warn('Cache storage error (non-quota):', err);
      return;
    }
    // Quota exhausted — evict the oldest entries in our namespace, then retry.
    try {
      const candidates: { key: string; timestamp: number }[] = [];
      for (let i = 0; i < localStorage.length; i++) {
        const storedKey = localStorage.key(i);
        if (!storedKey?.startsWith(CACHE_KEY_PREFIX)) continue;
        try {
          const stored: CacheEntry = JSON.parse(localStorage.getItem(storedKey) ?? '');
          if (typeof stored.timestamp === 'number') {
            candidates.push({ key: storedKey, timestamp: stored.timestamp });
          }
        } catch { /* skip non-cache entries */ }
      }
      // Oldest first; drop at least one, up to half of our entries.
      candidates.sort((a, b) => a.timestamp - b.timestamp);
      const evictCount = Math.max(1, Math.ceil(candidates.length / 2));
      for (const victim of candidates.slice(0, evictCount)) {
        localStorage.removeItem(victim.key);
      }
      localStorage.setItem(storageKey, serialized);
    } catch {
      logger.warn('Failed to cache data — localStorage may be full');
    }
  }
}
/**
* Load text data from a data source with fallback and caching.
*/
export async function loadText(
source: DataSource,
options: LoadOptions = {},
): Promise<string> {
const {
cacheKey,
cacheTTL = DEFAULT_CACHE_TTL,
retries = DEFAULT_RETRIES,
retryBackoff = DEFAULT_RETRY_BACKOFF,
} = options;
// Check cache first
Eif (cacheKey) {
const cached = getFromCache(cacheKey, cacheTTL);
Iif (cached) {
logger.debug(`Cache hit for ${cacheKey}`);
return cached;
}
}
// Try primary URL, then fallbacks
const urls = [source.primary, ...(source.fallbacks ?? [])];
let lastError: Error | null = null;
for (const url of urls) {
try {
const response = await fetchWithRetry(url, retries, retryBackoff);
const text = await response.text();
Eif (cacheKey) setCache(cacheKey, text);
logger.debug(`Loaded ${url} (${text.length} bytes)`);
return text;
} catch (error) {
lastError = error instanceof Error ? error : new Error(String(error));
logger.warn(`Failed to load from ${url}, trying next fallback...`);
}
}
throw lastError ?? new Error('No data sources provided');
}
/**
* Load and parse CSV data from a data source.
* Uses PapaParse (CSP-compatible) with a simple CSV fallback parser.
*/
/**
 * Load and parse CSV data from a data source.
 * Parsing uses PapaParse when available (CSP-compatible), otherwise the
 * module's simple fallback parser.
 *
 * @param source - Primary URL plus optional fallback URLs.
 * @param options - Passed through to `loadText` (cache + retry settings).
 * @returns Parsed rows keyed by header name.
 */
export async function loadCSV(
  source: DataSource,
  options: LoadOptions = {},
): Promise<CSVRow[]> {
  return parseCSV(await loadText(source, options));
}
/**
* Load and parse JSON data from a data source.
*/
/**
 * Load and parse JSON data from a data source.
 *
 * @typeParam T - Expected shape of the parsed document (caller-asserted,
 *   not validated at runtime).
 * @param source - Primary URL plus optional fallback URLs.
 * @param options - Passed through to `loadText` (cache + retry settings).
 * @returns The parsed JSON document.
 */
export async function loadJSON<T = unknown>(
  source: DataSource,
  options: LoadOptions = {},
): Promise<T> {
  const raw = await loadText(source, options);
  return JSON.parse(raw) as T;
}
/**
* Parse CSV text into rows.
* Uses PapaParse if available (CSP-compatible), falls back to a simple CSV parser.
* Does NOT use d3.csvParse as it requires unsafe-eval in CSP.
*/
/**
 * Parse CSV text into rows.
 * Uses PapaParse if available (CSP-compatible), falls back to a built-in parser.
 * Does NOT use d3.csvParse as it requires unsafe-eval in CSP.
 *
 * Bug fix: the previous fallback split every line on `,`, mangling RFC 4180
 * quoted fields that contain commas or escaped quotes (`""`), and split
 * records on `\n` only. The fallback now splits records on `\r?\n` and uses
 * a quote-aware field splitter (see `splitCSVLine`).
 *
 * @param text - Raw CSV text; the first line is treated as the header row.
 * @returns One object per data row, keyed by header; missing cells become ''.
 */
export function parseCSV(text: string): CSVRow[] {
  // Use PapaParse if available (CSP-compatible, no unsafe-eval needed)
  const PapaGlobal = (globalThis as Record<string, unknown>).Papa as
    | { parse: (text: string, config: Record<string, unknown>) => { data: CSVRow[] } }
    | undefined;
  if (PapaGlobal?.parse) {
    return PapaGlobal.parse(text, { header: true, skipEmptyLines: true }).data;
  }
  // CSP-safe fallback parser.
  const lines = text.trim().split(/\r?\n/);
  if (lines.length < 2) return [];
  const headers = splitCSVLine(lines[0]!);
  return lines.slice(1).filter((l) => l.trim()).map((line) => {
    const values = splitCSVLine(line);
    const row: CSVRow = {};
    headers.forEach((header, i) => {
      row[header] = values[i] ?? '';
    });
    return row;
  });
}

/**
 * Split one CSV record into fields (RFC 4180 subset).
 * Handles double-quoted fields containing commas and escaped quotes (`""`).
 * Unquoted fields are trimmed (matching the previous fallback's behavior);
 * quoted fields are returned verbatim. Multi-line quoted fields are not
 * supported by this fallback.
 */
function splitCSVLine(line: string): string[] {
  const fields: string[] = [];
  let current = '';
  let inQuotes = false; // currently inside a double-quoted field
  let wasQuoted = false; // field started with an opening quote
  for (let i = 0; i < line.length; i++) {
    const ch = line.charAt(i);
    if (inQuotes) {
      if (ch === '"') {
        if (line.charAt(i + 1) === '"') {
          current += '"'; // escaped quote inside a quoted field
          i++;
        } else {
          inQuotes = false; // closing quote
        }
      } else {
        current += ch;
      }
    } else if (ch === '"' && current.trim() === '') {
      inQuotes = true;
      wasQuoted = true;
      current = '';
    } else if (ch === ',') {
      fields.push(wasQuoted ? current : current.trim());
      current = '';
      wasQuoted = false;
    } else {
      current += ch;
    }
  }
  fields.push(wasQuoted ? current : current.trim());
  return fields;
}
/**
* Create a DataSource from local path with optional GitHub raw fallback.
*/
/**
 * Create a DataSource from a local path with optional GitHub raw fallback.
 *
 * @param localPath - Primary (local) URL or path.
 * @param repoPath - Optional path inside the Hack23/cia repository; when
 *   given, a raw.githubusercontent.com URL is added as the sole fallback.
 * @returns A DataSource usable with `loadText` / `loadCSV` / `loadJSON`.
 */
export function createDataSource(
  localPath: string,
  repoPath?: string,
): DataSource {
  if (!repoPath) {
    return { primary: localPath };
  }
  return {
    primary: localPath,
    fallbacks: [`https://raw.githubusercontent.com/Hack23/cia/master/${repoPath}`],
  };
}
|