All files / scripts/news-types/weekly-review generator.ts

84.61% Statements 99/117
62.5% Branches 25/40
68.75% Functions 11/16
93.33% Lines 98/105

Press n or j to go to the next uncovered block, b, p or k for the previous block.

1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317                                                                    13x   13x   13x   13x 13x   13x 13x 13x   13x 13x   13x     13x           12x   12x 36x 6x 6x       12x 12x 12x   12x     12x     12x       12x 12x 12x   12x 12x 12x   12x 12x 12x       12x 12x             12x 29x 29x 29x         12x       13x   13x 1x 1x       11x 11x     11x 11x 11x     11x 11x 11x     11x 11x 11x     11x 11x 11x       11x 11x 11x 13x 12x   11x   11x 4x 4x     3x 3x 3x 3x           11x 11x     11x 11x 11x     11x 11x   11x 15x   15x 15x 15x 15x 15x 15x 15x 15x   15x   15x                               15x             15x 1x 1x       11x                       1x 1x                       15x                                                                                                                     15x            
/**
 * @module news-types/weekly-review/generator
 * @description Main generateWeeklyReview function and title generation helper.
 * Orchestrates the full data pipeline from MCP API calls to final HTML output.
 *
 * @author Hack23 AB
 * @license Apache-2.0
 */
 
import { MCPClient } from '../../mcp-client.js';
import {
  generateArticleContent,
  extractWatchPoints,
  generateMetadata,
  calculateReadTime,
  generateSources,
  type RawDocument,
  type CIAContext,
} from '../../data-transformers.js';
import { generateArticleHTML } from '../../article-template.js';
import type { Language } from '../../types/language.js';
import type { ArticleCategory, GeneratedArticle, GenerationResult, MCPCallRecord } from '../../types/article.js';
import { getCurrentRiksmote } from '../motions.js';
import type { GenerationOptions, TitleSet, VotingRecord } from './types.js';
import { REQUIRED_TOOLS } from './types.js';
import { loadCIAContext, enrichWithFullText, attachSpeechesToDocuments, formatDateForSlug } from './data-loader.js';
import {
  analyzeCoalitionStress,
  calculateWeeklyActivityMetrics,
  generateCoalitionDynamicsSection,
  generateWeeklyActivitySection,
} from './analysis.js';
 
export async function generateWeeklyReview(options: GenerationOptions = {}): Promise<GenerationResult> {
  const { languages = ['en', 'sv'], lookbackDays = 7, writeArticle = null } = options;
 
  console.log('๐Ÿ“Š Generating Weekly Review article (full-document analysis pipeline)...');
 
  const mcpCalls: MCPCallRecord[] = [];
 
  try {
    const client = new MCPClient();
 
    const today = new Date();
    const startDate = new Date(today);
    startDate.setDate(startDate.getDate() - lookbackDays);
 
    const fromStr = formatDateForSlug(startDate);
    const toStr = formatDateForSlug(today);
 
    console.log(`  ๐Ÿ”„ Step 1 โ€” Searching documents ${fromStr} โ†’ ${toStr}...`);
 
    // โ”€โ”€ Step 1: search_dokument to discover IDs and types โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    const allDocs = await client.searchDocuments({
      from_date: fromStr,
      to_date: toStr,
      limit: 200,
    });
 
    mcpCalls.push({ tool: 'search_dokument', result: allDocs });
 
    const filterRecent = (docs: unknown[]): RawDocument[] =>
      (docs as RawDocument[]).filter(d => {
        const date = (d as Record<string, string>).datum ?? (d as Record<string, string>).publicerad ?? '';
        return date >= fromStr && date <= toStr;
      });
 
    // โ”€โ”€ Step 2: type-specific fetchers for richer metadata (non-fatal) โ”€โ”€โ”€โ”€โ”€
    console.log('  ๐Ÿ”„ Step 2 โ€” Fetching typed metadata (reports, propositions, motions)...');
    const rm = getCurrentRiksmote(today);
    const [reports, propositions, motions] = await Promise.all([
      Promise.resolve()
        .then(() => client.fetchCommitteeReports(50, rm) as Promise<unknown[]>)
        .catch((err: unknown) => { console.error('Failed to fetch committee reports:', err); return [] as unknown[]; }),
      Promise.resolve()
        .then(() => client.fetchPropositions(50, rm) as Promise<unknown[]>)
        .catch((err: unknown) => { console.error('Failed to fetch propositions:', err); return [] as unknown[]; }),
      Promise.resolve()
        .then(() => client.fetchMotions(50, rm) as Promise<unknown[]>)
        .catch((err: unknown) => { console.error('Failed to fetch motions:', err); return [] as unknown[]; }),
    ]);
 
    const recentReports = filterRecent(reports);
    const recentPropositions = filterRecent(propositions);
    const recentMotions = filterRecent(motions);
 
    for (const d of recentReports) { if (!(d as Record<string, string>).doktyp) (d as Record<string, string>).doktyp = 'bet'; }
    for (const d of recentPropositions) { if (!(d as Record<string, string>).doktyp) (d as Record<string, string>).doktyp = 'prop'; }
    for (const d of recentMotions) { if (!(d as Record<string, string>).doktyp) (d as Record<string, string>).doktyp = 'mot'; }
 
    mcpCalls.push({ tool: 'get_betankanden', result: recentReports });
    mcpCalls.push({ tool: 'get_propositioner', result: recentPropositions });
    mcpCalls.push({ tool: 'get_motioner', result: recentMotions });
 
    // Merge: typed docs (with dok_id) are highest quality; supplement with
    // real documents from general search (those that have both dok_id and doktyp).
    const typedDocs = [...recentReports, ...recentPropositions, ...recentMotions];
    const typedDocIds = new Set<string>(
      typedDocs.flatMap(d => {
        const id = (d as Record<string, string>).dok_id;
        return id ? [id] : [];
      }),
    );
 
    const searchExtras = (allDocs as RawDocument[]).filter(d => {
      const id = (d as Record<string, string>).dok_id;
      const type = (d as Record<string, string>).doktyp;
      return id && type && !typedDocIds.has(id);
    });
 
    // Use typed + extras when available; fall back to raw search results (test mocks / edge cases)
    const documents: RawDocument[] =
      typedDocs.length > 0 || searchExtras.length > 0
        ? [...typedDocs, ...searchExtras]
        : (allDocs as RawDocument[]);
 
    console.log(`  ๐Ÿ“Š Found ${documents.length} documents (${recentReports.length} reports, ${recentPropositions.length} propositions, ${recentMotions.length} motions)`);
 
    if (documents.length === 0) {
      console.log('  โ„น๏ธ No documents found for the past week, skipping');
      return { success: true, files: 0, mcpCalls };
    }
 
    // โ”€โ”€ Step 3: load each document completely via get_dokument_innehall โ”€โ”€โ”€โ”€
    console.log('  ๐Ÿ”„ Step 3 โ€” Loading full document content...');
    await enrichWithFullText(client, documents, mcpCalls, 3);
 
    // โ”€โ”€ Step 4: fetch speeches from the period โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    console.log('  ๐Ÿ”„ Step 4 โ€” Fetching speeches from the period...');
    const speeches = await Promise.resolve()
      .then(() => client.searchSpeeches({ rm, from: fromStr, to: toStr, limit: 100 }) as Promise<unknown[]>)
      .catch((err: unknown) => { console.error('Failed to fetch speeches:', err); return [] as unknown[]; });
 
    mcpCalls.push({ tool: 'search_anforanden', result: speeches });
    attachSpeechesToDocuments(documents, speeches as Array<Record<string, unknown>>);
    console.log(`  ๐Ÿ—ฃ Found ${speeches.length} speeches`);
 
    // โ”€โ”€ Step 5: load CIA intelligence context from static data โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    console.log('  ๐Ÿ”„ Step 5 โ€” Loading CIA intelligence context...');
    const ciaContext = loadCIAContext();
    console.log(`  ๐Ÿง  CIA context: ${ciaContext.partyPerformance.length} parties, coalition stability ${ciaContext.coalitionStability.stabilityScore}/100, motion denial rate ${ciaContext.overallMotionDenialRate}%`);
 
    // โ”€โ”€ Step 6: fetch voting records for coalition stress analysis โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    console.log('  ๐Ÿ”„ Step 6 โ€” Fetching voting records for coalition stress analysis...');
    let votingRecords: unknown[] = [];
    try {
      // search_voteringar does not support date params; use rm+limit then filter by datum.
      // Derive the riksmรถte(s) from both ends of the date range using the shared
      // getCurrentRiksmote utility (Sep boundary: month >= 8 โ†’ new session).
      const startRm = getCurrentRiksmote(startDate);
      const endRm = getCurrentRiksmote(today);
      const rmValues = startRm === endRm ? [startRm] : [startRm, endRm];
      const allVotesArrays = await Promise.all(
        rmValues.map(rm => client.fetchVotingRecords({ rm, limit: 200 }) as Promise<VotingRecord[]>),
      );
      const allVotes: VotingRecord[] = allVotesArrays.flat();
      // Post-query filter to the weekly window using the datum field.
      votingRecords = allVotes.filter(r => {
        const d = r.datum;
        if (typeof d !== 'string') return false;
        // Extract YYYY-MM-DD via regex to handle ISO timestamps and timezone suffixes
        // (e.g. '2026-02-10T10:00:00' or '2026-09-05+02:00').
        const match = /^\d{4}-\d{2}-\d{2}/.exec(d);
        Iif (!match) return false;
        const dateStr = match[0];
        return dateStr >= fromStr && dateStr <= toStr;
      });
    } catch (err: unknown) {
      console.error('Failed to fetch voting records:', err);
    }
 
    mcpCalls.push({ tool: 'search_voteringar', result: votingRecords });
    console.log(`  ๐Ÿ—ณ Found ${votingRecords.length} voting records`);
 
    // โ”€โ”€ Compute coalition stress and week-over-week metrics โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    const coalitionStress = analyzeCoalitionStress(votingRecords as VotingRecord[], ciaContext);
    const weekMetrics = calculateWeeklyActivityMetrics(documents, speeches, votingRecords as VotingRecord[], ciaContext);
    console.log(`  ๐Ÿ“ˆ Coalition risk: ${coalitionStress.riskIndex.level} (${coalitionStress.riskIndex.score}/100), activity: ${weekMetrics.activityChange}`);
 
    // โ”€โ”€ Generate articles โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    const slug = `${formatDateForSlug(today)}-weekly-review`;
    const articles: GeneratedArticle[] = [];
 
    for (const lang of languages) {
      console.log(`  ๐ŸŒ Generating ${lang.toUpperCase()} version...`);
 
      const content: string = generateArticleContent({ documents, ciaContext }, 'weekly-review', lang);
      const coalitionSection: string = generateCoalitionDynamicsSection(coalitionStress, lang);
      const weekOverWeekSection: string = generateWeeklyActivitySection(weekMetrics, lang);
      const fullContent: string = content + coalitionSection + weekOverWeekSection;
      const watchPoints = extractWatchPoints({ documents, ciaContext }, lang);
      const metadata = generateMetadata({ documents, ciaContext }, 'weekly-review', lang);
      const readTime: string = calculateReadTime(fullContent);
      const sources: string[] = generateSources([...REQUIRED_TOOLS]);
 
      const titles: TitleSet = getTitles(lang, documents.length);
 
      const html: string = generateArticleHTML({
        slug: `${slug}-${lang}.html`,
        title: titles.title,
        subtitle: titles.subtitle,
        date: today.toISOString().split('T')[0] ?? '',
        type: 'retrospective' as ArticleCategory,
        readTime,
        lang,
        content: fullContent,
        watchPoints,
        sources,
        keywords: metadata.keywords,
        topics: metadata.topics,
        tags: metadata.tags
      });
 
      articles.push({
        lang,
        html,
        filename: `${slug}-${lang}.html`,
        slug: `${slug}-${lang}`
      });
 
      if (writeArticle) {
        await writeArticle(html, `${slug}-${lang}.html`);
        console.log(`  โœ… ${lang.toUpperCase()} version generated`);
      }
    }
 
    return {
      success: true,
      files: languages.length,
      slug,
      articles,
      mcpCalls,
      crossReferences: {
        event: `${documents.length} documents over ${lookbackDays} days`,
        sources: ['search_dokument', 'get_dokument_innehall', 'search_anforanden', 'get_betankanden', 'get_propositioner', 'get_motioner', 'search_voteringar']
      }
    };
  } catch (error: unknown) {
    console.error('โŒ Error generating Weekly Review:', (error as Error).message);
    return {
      success: false,
      error: (error as Error).message,
      mcpCalls
    };
  }
}
 
/**
 * Build the localized title/subtitle pair for the weekly review article.
 *
 * @param lang - Target language code.
 * @param documentCount - Number of documents analyzed; interpolated into the subtitle.
 * @returns The TitleSet for `lang`, falling back to English when no entry exists.
 */
function getTitles(lang: Language, documentCount: number): TitleSet {
  const n = documentCount;

  const localized: Record<Language, TitleSet> = {
    en: {
      title: 'Weekly Review: Parliament in Retrospect',
      subtitle: `Analysis of ${n} key developments from the past week in Swedish politics`,
    },
    sv: {
      title: 'Veckans sammanfattning: Riksdagen i retrospektiv',
      subtitle: `Analys av ${n} viktiga hรคndelser frรฅn den gรฅngna veckan`,
    },
    da: {
      title: 'Ugentlig gennemgang: Parlamentet i tilbageblik',
      subtitle: `Analyse af ${n} vigtige begivenheder fra den forgangne uge`,
    },
    no: {
      title: 'Ukentlig gjennomgang: Stortinget i retrospekt',
      subtitle: `Analyse av ${n} viktige hendelser fra den siste uken`,
    },
    fi: {
      title: 'Viikkokatsaus: Eduskunta jรคlkikรคteen',
      subtitle: `Analyysi ${n} tรคrkeรคstรค tapahtumasta viime viikolta`,
    },
    de: {
      title: 'Wochenrรผckblick: Parlament in Rรผckschau',
      subtitle: `Analyse von ${n} wichtigen Entwicklungen der vergangenen Woche`,
    },
    fr: {
      title: 'Revue hebdomadaire : Le Parlement en rรฉtrospective',
      subtitle: `Analyse de ${n} dรฉveloppements clรฉs de la semaine รฉcoulรฉe`,
    },
    es: {
      title: 'Revisiรณn semanal: El Parlamento en retrospectiva',
      subtitle: `Anรกlisis de ${n} desarrollos clave de la semana pasada`,
    },
    nl: {
      title: 'Wekelijkse terugblik: Parlement in retrospectief',
      subtitle: `Analyse van ${n} belangrijke ontwikkelingen van de afgelopen week`,
    },
    ar: {
      title: 'ุงู„ู…ุฑุงุฌุนุฉ ุงู„ุฃุณุจูˆุนูŠุฉ: ุงู„ุจุฑู„ู…ุงู† ููŠ ุงุณุชุนุฑุงุถ',
      subtitle: `ุชุญู„ูŠู„ ${n} ุชุทูˆุฑุงุช ุฑุฆูŠุณูŠุฉ ู…ู† ุงู„ุฃุณุจูˆุน ุงู„ู…ุงุถูŠ`,
    },
    he: {
      title: 'ืกืงื™ืจื” ืฉื‘ื•ืขื™ืช: ื”ืคืจืœืžื ื˜ ื‘ืจืื™',
      subtitle: `ื ื™ืชื•ื— ${n} ื”ืชืคืชื—ื•ื™ื•ืช ืžืจื›ื–ื™ื•ืช ืžื”ืฉื‘ื•ืข ืฉืขื‘ืจ`,
    },
    ja: {
      title: '้€ฑ้–“ใƒฌใƒ“ใƒฅใƒผ๏ผš่ญฐไผšใฎๆŒฏใ‚Š่ฟ”ใ‚Š',
      subtitle: `ๅ…ˆ้€ฑใฎ${n}ไปถใฎไธป่ฆใชๅ‹•ๅ‘ใฎๅˆ†ๆž`,
    },
    ko: {
      title: '์ฃผ๊ฐ„ ๋ฆฌ๋ทฐ: ์˜ํšŒ ํšŒ๊ณ ',
      subtitle: `์ง€๋‚œ ์ฃผ ${n}๊ฑด์˜ ์ฃผ์š” ๋™ํ–ฅ ๋ถ„์„`,
    },
    zh: {
      title: 'ๆฏๅ‘จๅ›ž้กพ๏ผš่ฎฎไผšๅ›ž้กพ',
      subtitle: `่ฟ‡ๅŽปไธ€ๅ‘จ${n}้กน้‡่ฆๅ‘ๅฑ•็š„ๅˆ†ๆž`,
    },
  };

  // Record values are always truthy objects, so ?? matches the original || fallback.
  return localized[lang] ?? localized.en;
}
 
/**
 * Validate weekly review article structure
 */