(** Changelog generation for monopam.

    This module handles generating weekly and daily changelog entries using
    Claude AI to analyze git commit history and produce user-facing change
    summaries.

    Changes are stored in a [.changes] directory at the monorepo root:
    - [.changes/REPO.json] - weekly changelog entries
    - [.changes/REPO-YYYY-MM-DD.json] - daily changelog entries (one file per
      day per repo)
    - [.changes/YYYYMMDD.json] - aggregated daily changes for broadcasting

    {1 Submodules}

    - {!Aggregated} - Types and I/O for aggregated daily changes
      (YYYYMMDD.json)
    - {!Daily} - Types and I/O for per-day-per-repo changes
      (repo-YYYY-MM-DD.json)
    - {!Query} - High-level query interface for changes *)

module Aggregated = Changes_aggregated
(** Re-export submodules for querying changes *)

module Daily = Changes_daily
module Query = Changes_query

(** {1 Error Helpers} *)

(* Uniform [Error _] constructors so every failure surfaces as a formatted
   string result rather than an exception. *)

(* Parse failure for a named on-disk JSON file. *)
let err_parse name e =
  Error (Fmt.str "Failed to parse %s: %s" name (Json.Error.to_string e))

(* Parse failure for raw text returned by Claude. *)
let err_claude_parse e =
  Error (Fmt.str "Failed to parse Claude response: %s" (Json.Error.to_string e))

(* Decode failure for Claude's structured (already-parsed JSON) output. *)
let err_decode e =
  Error (Fmt.str "Failed to decode response: %s" (Json.Error.to_string e))

(* The span of commits a changelog entry was generated from. *)
type commit_range = { from_hash : string; to_hash : string; count : int }

type weekly_entry = {
  week_start : string; (* ISO date YYYY-MM-DD, Monday *)
  week_end : string; (* ISO date YYYY-MM-DD, Sunday *)
  summary : string; (* One-line summary *)
  changes : string list; (* Bullet points *)
  commit_range : commit_range;
}

type daily_entry = {
  date : string; (* ISO date YYYY-MM-DD *)
  hour : int; (* Hour of day 0-23 *)
  timestamp : Ptime.t; (* RFC3339 timestamp for precise ordering *)
  summary : string; (* One-line summary *)
  changes : string list; (* Bullet points *)
  commit_range : commit_range;
  contributors : string list; (* List of contributors for this entry *)
  repo_url : string option; (* Upstream repository URL *)
}

(* Weekly changes file: all weekly entries for one repository. *)
type file = { repository : string; entries : weekly_entry list }

(* Daily changes file: entries for one repository on one specific day. *)
type daily_file = { repository : string; entries : daily_entry list }

(** Mode for changelog generation *)
type mode = Weekly | Daily

(* Jsont codecs *)
(* NOTE(review): these assume a Jsont-style [Json.Codec] API where
   [Object.member] declarations must appear in the same order as the
   arguments of the [make] constructor — confirm against the Json module. *)

let commit_range_jsont =
  let open Json.Codec in
  let make from_hash to_hash count = { from_hash; to_hash; count } in
  Object.map ~kind:"commit_range" make
  |> Object.member "from" string ~enc:(fun r -> r.from_hash)
  |> Object.member "to" string ~enc:(fun r -> r.to_hash)
  |> Object.member "count" int ~enc:(fun r -> r.count)
  |> Object.seal

let weekly_entry_jsont : weekly_entry Json.codec =
  let open Json.Codec in
  let make week_start week_end summary changes commit_range : weekly_entry =
    { week_start; week_end; summary; changes; commit_range }
  in
  Object.map ~kind:"weekly_entry" make
  |> Object.member "week_start" string
       ~enc:(fun (e : weekly_entry) -> e.week_start)
  |> Object.member "week_end" string ~enc:(fun (e : weekly_entry) -> e.week_end)
  |> Object.member "summary" string ~enc:(fun (e : weekly_entry) -> e.summary)
  |> Object.member "changes" (list string)
       ~enc:(fun (e : weekly_entry) -> e.changes)
  |> Object.member "commit_range" commit_range_jsont
       ~enc:(fun (e : weekly_entry) -> e.commit_range)
  |> Object.seal

let file_jsont : file Json.codec =
  let open Json.Codec in
  let make repository entries : file = { repository; entries } in
  Object.map ~kind:"changes_file" make
  |> Object.member "repository" string ~enc:(fun (f : file) -> f.repository)
  |> Object.member "entries" (list weekly_entry_jsont)
       ~enc:(fun (f : file) -> f.entries)
  |> Object.seal

(* Codec for [Ptime.t] as an RFC3339 string (always encoded in UTC).
   NOTE(review): the decoder raises [Failure] on an invalid timestamp —
   presumably the codec machinery converts that into a decode error;
   confirm against the Json module. *)
let ptime_jsont =
  let open Json.Codec in
  let enc t = Ptime.to_rfc3339 t ~tz_offset_s:0 in
  let dec s =
    match Ptime.of_rfc3339 s with
    | Ok (t, _, _) -> t
    | Error _ -> failwith ("Invalid timestamp: " ^ s)
  in
  map ~dec ~enc string

let daily_entry_jsont : daily_entry Json.codec =
  let open Json.Codec in
  let make date hour timestamp summary changes commit_range contributors
      repo_url : daily_entry =
    {
      date;
      hour;
      timestamp;
      summary;
      changes;
      commit_range;
      contributors;
      repo_url;
    }
  in
  (* Default hour and timestamp for backwards compat when reading old files *)
  let default_hour = 0 in
  let default_timestamp = Ptime.epoch in
  Object.map ~kind:"daily_entry" make
  |> Object.member "date" string ~enc:(fun (e : daily_entry) -> e.date)
  |> Object.member "hour" int ~dec_absent:default_hour
       ~enc:(fun (e : daily_entry) -> e.hour)
  |> Object.member "timestamp" ptime_jsont ~dec_absent:default_timestamp
       ~enc:(fun (e : daily_entry) -> e.timestamp)
  |> Object.member "summary" string ~enc:(fun (e : daily_entry) -> e.summary)
  |> Object.member "changes" (list string)
       ~enc:(fun (e : daily_entry) -> e.changes)
  |> Object.member "commit_range" commit_range_jsont
       ~enc:(fun (e : daily_entry) -> e.commit_range)
  |> Object.member "contributors" (list string) ~dec_absent:[]
       ~enc:(fun (e : daily_entry) -> e.contributors)
  |> Object.member "repo_url" (option string) ~dec_absent:None
       ~enc:(fun (e : daily_entry) -> e.repo_url)
  |> Object.seal

let daily_file_jsont : daily_file Json.codec =
  let open Json.Codec in
  let make repository entries : daily_file = { repository; entries } in
  Object.map ~kind:"daily_changes_file" make
  |> Object.member "repository" string
       ~enc:(fun (f : daily_file) -> f.repository)
  |> Object.member "entries" (list daily_entry_jsont)
       ~enc:(fun (f : daily_file) -> f.entries)
  |> Object.seal

(* File I/O *)

(* Helper to ensure .changes directory exists *)
(* Creates the directory when it is missing or when probing it fails with
   an I/O error (e.g. it does not exist yet). *)
let ensure_changes_dir ~fs monorepo =
  let changes_dir = Eio.Path.(fs / Fpath.to_string monorepo / ".changes") in
  match Eio.Path.kind ~follow:true changes_dir with
  | `Directory -> ()
  | _ -> Eio.Path.mkdir ~perm:0o755 changes_dir
  | exception Eio.Io _ -> Eio.Path.mkdir ~perm:0o755 changes_dir

(* Load weekly changes from .changes/REPO.json in monorepo.
   A missing or unreadable file yields an empty [file] (Ok); only a present
   but unparseable file is an error. *)
let load ~fs ~monorepo repo_name =
  let file_path =
    Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / (repo_name ^ ".json"))
  in
  match Eio.Path.kind ~follow:true file_path with
  | `Regular_file -> (
      let content = Eio.Path.load file_path in
      match Json.of_string file_jsont content with
      | Ok cf -> Ok cf
      | Error e -> err_parse (repo_name ^ ".json") e)
  | _ -> Ok { repository = repo_name; entries = [] }
  | exception Eio.Io _ -> Ok { repository = repo_name; entries = [] }

(* Save weekly changes to .changes/REPO.json in monorepo, creating the
   .changes directory first if needed. Truncates any existing file. *)
let save ~fs ~monorepo (cf : file) =
  ensure_changes_dir ~fs monorepo;
  let file_path =
    Eio.Path.(
      fs / Fpath.to_string monorepo / ".changes" / (cf.repository ^ ".json"))
  in
  let content = Json.to_string ~indent:2 file_jsont cf in
  Eio.Path.save ~create:(`Or_truncate 0o644) file_path content;
  Ok ()

(* Filename for daily changes: REPO-YYYY-MM-DD.json *)
let daily_filename repo_name date = repo_name ^ "-" ^ date ^ ".json"

(* Check if daily file exists on disk *)
let daily_exists ~fs ~monorepo ~date repo_name =
  let filename = daily_filename repo_name date in
  let file_path =
    Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename)
  in
  match Eio.Path.kind ~follow:true file_path with
  | `Regular_file -> true
  | _ -> false
  | exception Eio.Io _ -> false

(* Load daily changes from .changes/REPO-YYYY-MM-DD.json in monorepo.
   Same error policy as [load]: missing file is an empty Ok result. *)
let load_daily ~fs ~monorepo ~date repo_name =
  let filename = daily_filename repo_name date in
  let file_path =
    Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename)
  in
  match Eio.Path.kind ~follow:true file_path with
  | `Regular_file -> (
      let content = Eio.Path.load file_path in
      match Json.of_string daily_file_jsont content with
      | Ok cf -> Ok cf
      | Error e -> err_parse filename e)
  | _ -> Ok { repository = repo_name; entries = [] }
  | exception Eio.Io _ -> Ok { repository = repo_name; entries = [] }

(* Save daily changes to .changes/REPO-YYYY-MM-DD.json in monorepo. *)
let save_daily ~fs ~monorepo ~date (cf : daily_file) =
  ensure_changes_dir ~fs monorepo;
  let filename = daily_filename cf.repository date in
  let file_path =
    Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename)
  in
  let content = Json.to_string ~indent:2 daily_file_jsont cf in
  Eio.Path.save ~create:(`Or_truncate 0o644) file_path content;
  Ok ()

(* Markdown generation *)

(* Render one repository's weekly changes file as a standalone markdown
   changelog, one "## Week of ..." section per entry. *)
let to_markdown (cf : file) =
  let buf = Buffer.create 1024 in
  Buffer.add_string buf (Fmt.str "# %s Changelog\n\n" cf.repository);
  List.iter
    (fun (entry : weekly_entry) ->
      Buffer.add_string buf
        (Fmt.str "## Week of %s to %s\n\n" entry.week_start entry.week_end);
      Buffer.add_string buf (Fmt.str "%s\n\n" entry.summary);
      List.iter
        (fun change -> Buffer.add_string buf (Fmt.str "- %s\n" change))
        entry.changes;
      Buffer.add_string buf "\n")
    cf.entries;
  Buffer.contents buf

(* Group a (repo, entry) list — already sorted by week — into
   (week_key, entries) runs, preserving input order within each run.
   The week key is "START to END". Assumes equal weeks are adjacent. *)
let group_weekly_entries sorted =
  let rec loop acc current_week current_group = function
    | [] ->
        if current_group <> [] then (current_week, List.rev current_group) :: acc
        else acc
    | (repo, (entry : weekly_entry)) :: rest ->
        let week_key = entry.week_start ^ " to " ^ entry.week_end in
        if current_week = "" || current_week = week_key then
          loop acc week_key ((repo, entry) :: current_group) rest
        else
          loop
            ((current_week, List.rev current_group) :: acc)
            week_key
            [ (repo, entry) ]
            rest
  in
  List.rev (loop [] "" [] sorted)

(* Aggregate all repositories' weekly files into one markdown changelog,
   newest week first. [history] > 0 limits output to that many weeks;
   [history] <= 0 means unlimited. *)
let aggregate ~history (cfs : file list) =
  let all_entries =
    List.concat_map
      (fun (cf : file) ->
        List.map (fun (e : weekly_entry) -> (cf.repository, e)) cf.entries)
      cfs
  in
  (* Descending by week_start: ISO dates sort correctly as strings. *)
  let sorted =
    List.sort
      (fun (_, (e1 : weekly_entry)) (_, (e2 : weekly_entry)) ->
        String.compare e2.week_start e1.week_start)
      all_entries
  in
  let grouped = group_weekly_entries sorted in
  let limited =
    if history > 0 then List.filteri (fun i _ -> i < history) grouped
    else grouped
  in
  let buf = Buffer.create 4096 in
  Buffer.add_string buf "# Changelog\n\n";
  List.iter
    (fun (week_key, entries) ->
      Buffer.add_string buf (Fmt.str "## Week of %s\n\n" week_key);
      List.iter
        (fun (repo, (entry : weekly_entry)) ->
          Buffer.add_string buf (Fmt.str "### %s\n" repo);
          Buffer.add_string buf (Fmt.str "%s\n" entry.summary);
          List.iter
            (fun change -> Buffer.add_string buf (Fmt.str "- %s\n" change))
            entry.changes;
          Buffer.add_string buf "\n")
        entries)
    limited;
  Buffer.contents buf

(* Week calculation *)

(* Day of week: 0 = Sunday, 1 = Monday, ... 6 = Saturday *)
(* Returns 0 (Sunday) for an invalid date — callers pass dates obtained
   from Ptime, so the [None] branch should be unreachable in practice. *)
let day_of_week year month day =
  match Ptime.of_date (year, month, day) with
  | None -> 0
  | Some t -> (
      match Ptime.weekday t with
      | `Sun -> 0
      | `Mon -> 1
      | `Tue -> 2
      | `Wed -> 3
      | `Thu -> 4
      | `Fri -> 5
      | `Sat -> 6)

(* Add [n] (possibly negative) days to a (y, m, d) date, computed in UTC.
   Falls back to the input date if the arithmetic is out of Ptime's range. *)
let add_days (y, m, d) n =
  match Ptime.of_date (y, m, d) with
  | None -> (y, m, d)
  | Some t -> (
      let span = Ptime.Span.of_int_s (n * 86400) in
      match Ptime.add_span t span with
      | Some t' ->
          let (y', m', d'), _ = Ptime.to_date_time t' in
          (y', m', d')
      | None -> (y, m, d))

(* Format a (y, m, d) triple as ISO YYYY-MM-DD. *)
let format_date (y, m, d) = Fmt.str "%04d-%02d-%02d" y m d

(* The Monday..Sunday week containing the given date, as ISO strings. *)
let week_of_date (y, m, d) =
  let dow = day_of_week y m d in
  (* Monday = 1, so days to subtract to get to Monday *)
  let days_to_monday = if dow = 0 then 6 else dow - 1 in
  let monday = add_days (y, m, d) (-days_to_monday) in
  let sunday = add_days monday 6 in
  (format_date monday, format_date sunday)

let week_of_ptime t =
  let (y, m, d), _ = Ptime.to_date_time t in
  week_of_date (y, m, d)

(* Unix timestamps (UTC) bounding the Monday..Sunday week containing [t]:
   Monday 00:00:00 and Sunday 23:59:59. 0L on Ptime range failure. *)
let week_timestamps_of_ptime t =
  let (y, m, d), _ = Ptime.to_date_time t in
  let dow = day_of_week y m d in
  let days_to_monday = if dow = 0 then 6 else dow - 1 in
  let monday = add_days (y, m, d) (-days_to_monday) in
  let sunday = add_days monday 6 in
  let to_timestamp (y, m, d) hh mm ss =
    match Ptime.of_date_time ((y, m, d), ((hh, mm, ss), 0)) with
    | Some pt -> Int64.of_float (Ptime.to_float_s pt)
    | None -> 0L
  in
  (to_timestamp monday 0 0 0, to_timestamp sunday 23 59 59)

(* Unix timestamps (UTC) bounding the calendar day containing [t]. *)
let day_timestamps_of_ptime t =
  let (y, m, d), _ = Ptime.to_date_time t in
  let to_timestamp hh mm ss =
    match Ptime.of_date_time ((y, m, d), ((hh, mm, ss), 0)) with
    | Some pt -> Int64.of_float (Ptime.to_float_s pt)
    | None -> 0L
  in
  (to_timestamp 0 0 0, to_timestamp 23 59 59)

(* True if the weekly file already has an entry starting at [week_start]. *)
let has_week (cf : file) ~week_start =
  List.exists (fun (e : weekly_entry) -> e.week_start = week_start) cf.entries

let date_of_ptime t =
  let (y, m, d), _ = Ptime.to_date_time t in
  format_date (y, m, d)

let has_day (cf : daily_file) ~date:_ =
  (* With per-day files, the file is already for a specific date.
     This function now checks if the file has any entries. *)
  cf.entries <> []

(* Format a single daily entry for markdown *)
let format_daily_entry buf repo (entry : daily_entry) =
  (* Link the repo heading to its upstream URL when we have one. *)
  let repo_header =
    match entry.repo_url with
    | Some url -> Fmt.str "[%s](%s)" repo url
    | None -> repo
  in
  Buffer.add_string buf (Fmt.str "### %s\n\n" repo_header);
  Buffer.add_string buf (Fmt.str "%s\n\n" entry.summary);
  List.iter
    (fun change -> Buffer.add_string buf (Fmt.str "- %s\n" change))
    entry.changes;
  if entry.contributors <> [] then begin
    let contributors_str = String.concat ", " entry.contributors in
    Buffer.add_string buf (Fmt.str "\n*Contributors: %s*\n" contributors_str)
  end;
  Buffer.add_string buf "\n"

(* Format entries for a single date *)
(* Skips entries with no bullet points; emits no heading at all if nothing
   remains for the date. *)
let format_date_entries buf date entries =
  let entries_with_changes =
    List.filter (fun (_, (entry : daily_entry)) -> entry.changes <> []) entries
  in
  if entries_with_changes <> [] then begin
    Buffer.add_string buf (Fmt.str "## %s\n\n" date);
    List.iter
      (fun (repo, (entry : daily_entry)) -> format_daily_entry buf repo entry)
      entries_with_changes
  end

(* Aggregate daily changes into DAILY-CHANGES.md *)
(* [history] > 0 limits output to that many most-recent days. *)
let aggregate_daily ~history (cfs : daily_file list) =
  (* Collect all entries from all files, tagged with repository *)
  let all_entries =
    List.concat_map
      (fun (cf : daily_file) ->
        List.map (fun (e : daily_entry) -> (cf.repository, e)) cf.entries)
      cfs
  in
  (* Sort by date descending *)
  let sorted =
    List.sort
      (fun (_, (e1 : daily_entry)) (_, (e2 : daily_entry)) ->
        String.compare e2.date e1.date)
      all_entries
  in
  (* Group by date *)
  let rec group_by_date acc current_date current_group = function
    | [] ->
        if current_group <> [] then (current_date, List.rev current_group) :: acc
        else acc
    | (repo, (entry : daily_entry)) :: rest ->
        if current_date = "" || current_date = entry.date then
          group_by_date acc entry.date ((repo, entry) :: current_group) rest
        else
          group_by_date
            ((current_date, List.rev current_group) :: acc)
            entry.date
            [ (repo, entry) ]
            rest
  in
  let grouped = List.rev (group_by_date [] "" [] sorted) in
  (* Take only the requested number of days *)
  let limited =
    if history > 0 then List.filteri (fun i _ -> i < history) grouped
    else grouped
  in
  (* Generate markdown - only include repos with actual changes *)
  let buf = Buffer.create 4096 in
  Buffer.add_string buf "# Daily Changelog\n\n";
  List.iter
    (fun (date, entries) -> format_date_entries buf date entries)
    limited;
  Buffer.contents buf

(* Claude prompt generation *)

(* Render one commit as a markdown block for the Claude prompt, with a
   short (7-char) hash, author, date, subject, and body if present. *)
let format_commit_block buf (commit : Git.Repository.log_entry) =
  Buffer.add_string buf
    (Fmt.str "### %s by %s (%s)\n"
       (String.sub commit.hash 0 (min 7 (String.length commit.hash)))
       commit.author commit.date);
  Buffer.add_string buf (Fmt.str "%s\n\n" commit.subject);
  if commit.body <> "" then begin
    Buffer.add_string buf (Fmt.str "%s\n" commit.body)
  end;
  Buffer.add_string buf "---\n\n"

(* Shared instruction block appended to both weekly and daily prompts. *)
let changelog_instructions =
  {|## Instructions: 1. Focus on USER-FACING changes only. Skip: - Internal refactoring with no API impact - CI/build system tweaks - Typo fixes in code comments - Dependency bumps (unless they add features) 2. IMPORTANT: If there are NO user-facing changes, output a blank entry with empty summary and empty changes array. Do NOT write "no changes" or similar text. Example for no changes: {"summary": "", "changes": []} 3. Otherwise, respond in this exact JSON format: { "summary": "One sentence describing the most important change", "changes": [ "First user-facing change as a bullet point", "Second change", "..." ] } 4. Write for developers using this library. Be: - Concise (max 80 chars per bullet) - Specific (mention function/module names) - Action-oriented (start with verbs: Added, Fixed, Improved, Removed) 5. Maximum 5 bullet points. Group related changes if needed. 
|}

(* Build the Claude prompt for a weekly changelog entry. *)
let generate_weekly_prompt ~repository ~week_start ~week_end commits =
  let buf = Buffer.create 4096 in
  Buffer.add_string buf
    (Fmt.str "You are analyzing git commits for the OCaml library \"%s\".\n"
       repository);
  Buffer.add_string buf
    (Fmt.str
       "Generate a user-facing changelog entry for the week of %s to %s.\n\n"
       week_start week_end);
  Buffer.add_string buf "## Commits this week:\n\n";
  List.iter (format_commit_block buf) commits;
  Buffer.add_string buf changelog_instructions;
  Buffer.contents buf

(* Build the Claude prompt for a daily changelog entry. *)
let generate_daily_prompt ~repository ~date commits =
  let buf = Buffer.create 4096 in
  Buffer.add_string buf
    (Fmt.str "You are analyzing git commits for the OCaml library \"%s\".\n"
       repository);
  Buffer.add_string buf
    (Fmt.str "Generate a user-facing changelog entry for %s.\n\n" date);
  Buffer.add_string buf "## Commits today:\n\n";
  List.iter (format_commit_block buf) commits;
  Buffer.add_string buf changelog_instructions;
  Buffer.contents buf

(* Backwards compatibility *)
let generate_prompt = generate_weekly_prompt

(* Response parsing *)

type claude_response = { summary : string; changes : string list }

let claude_response_jsont =
  let open Json.Codec in
  let make summary changes = { summary; changes } in
  Object.map ~kind:"claude_response" make
  |> Object.member "summary" string ~enc:(fun r -> r.summary)
  |> Object.member "changes" (list string) ~enc:(fun r -> r.changes)
  |> Object.seal

(* Parse Claude's textual reply. [Ok None] means "no user-facing changes"
   (either the legacy NO_CHANGES sentinel or an empty summary+changes). *)
let parse_claude_response text =
  let text = String.trim text in
  (* Legacy support for NO_CHANGES response *)
  if text = "NO_CHANGES" then Ok None
  else
    match Json.of_string claude_response_jsont text with
    | Ok r ->
        (* Treat empty summary and changes as no changes *)
        if r.summary = "" && r.changes = [] then Ok None else Ok (Some r)
    | Error e -> err_claude_parse e

(* Main analysis function *)

(* JSON Schema handed to Claude for structured output. *)
let changelog_output_schema =
  Json.Value.of_string_exn
    {|{ "type": "object", "properties": { "summary": { "type": "string" }, "changes": { "type": "array", "items": { "type": "string" } } }, "required": ["summary", 
"changes"] }|}

(* Fold a stream of Claude responses into a single analysis result.
   Later responses overwrite earlier ones (last relevant response wins);
   no relevant response at all is treated as "no changes". *)
let process_claude_responses responses =
  let result = ref None in
  List.iter
    (function
      | Claude.Response.Complete c -> (
          match Claude.Response.Complete.structured_output c with
          | Some json -> (
              match Json.decode claude_response_jsont json with
              | Ok r ->
                  if r.summary = "" && r.changes = [] then
                    result := Some (Ok None)
                  else result := Some (Ok (Some r))
              | Error e -> result := Some (err_decode e))
          | None -> (
              (* No structured output: fall back to parsing raw text. *)
              match Claude.Response.Complete.result_text c with
              | Some text -> result := Some (parse_claude_response text)
              | None -> result := Some (Ok None)))
      | Claude.Response.Text t ->
          let text = Claude.Response.Text.content t in
          if String.trim text = "NO_CHANGES" then result := Some (Ok None)
      | Claude.Response.Error e ->
          result :=
            Some
              (Error
                 (Fmt.str "Claude error: %s" (Claude.Response.Error.message e)))
      | _ -> ())
    responses;
  match !result with Some r -> r | None -> Ok None

(* Run a single-turn Claude query with structured output and process the
   responses. *)
let run_claude_analysis ~sw ~process_mgr ~clock prompt =
  let output_format =
    Claude.Structured_output.of_json_schema changelog_output_schema
  in
  let options =
    Claude.Options.default
    |> Claude.Options.with_output_format output_format
    |> Claude.Options.with_max_turns 1
  in
  let client = Claude.Client.v ~sw ~process_mgr ~clock ~options () in
  Claude.Client.query client prompt;
  let responses = Claude.Client.receive_all client in
  process_claude_responses responses

(* Analyze a week's commits; [Ok None] when there are no commits or no
   user-facing changes. *)
let analyze_commits ~sw ~process_mgr ~clock ~repository ~week_start ~week_end
    commits =
  if commits = [] then Ok None
  else
    let prompt = generate_prompt ~repository ~week_start ~week_end commits in
    run_claude_analysis ~sw ~process_mgr ~clock prompt

(* Daily analysis function *)
let analyze_commits_daily ~sw ~process_mgr ~clock ~repository ~date commits =
  if commits = [] then Ok None
  else
    let prompt = generate_daily_prompt ~repository ~date commits in
    run_claude_analysis ~sw ~process_mgr ~clock prompt

(* Refine daily changelog markdown to be more narrative *)
(* Sends the generated markdown back through Claude for editorial polish.
   Falls back to the original markdown if Claude returns nothing usable. *)
let refine_daily_changelog ~sw ~process_mgr ~clock markdown =
  let prompt =
    Fmt.str
      {|You are editing a daily changelog for an OCaml monorepo. Your task is to refine the following changelog to be: 1. More narrative and human-readable - write it as a daily update that developers will want to read 2. Grouped by related changes - if multiple repos have related changes, group them together 3. Succinct but complete - don't lose any information, but make it more concise 4. Well-ordered - put the most significant changes first Keep the markdown format with: - A main heading for each date - Sub-sections for related groups of changes (not necessarily by repo), such as "New Libraries", "Major Features", "Critical Bug Fixes", "Code Quality Improvements", "Documentation Updates" - Bullet points for individual changes - Preserve all contributor attributions (format: — *Contributor Name*) - IMPORTANT: Every repository name MUST be a markdown link. If a repo already has a link, preserve it. If not, generate one using the pattern: [repo-name](https://tangled.org/@anil.recoil.org/repo-name.git) - Format each bullet as: **[repo-name](url)**: Description — *Contributors* (if any) IMPORTANT: For "initial import" or "added as subtree" entries: - Put these in a dedicated "New Libraries" section - Expand the description to explain what the library does and its purpose - If the library relates to other libraries in the monorepo (e.g., uses ocaml-requests for HTTP, complements ocaml-imap, etc.), mention those relationships with links - Example: Instead of "Initial import of ocaml-jmap library", write "OCaml implementation of the JMAP protocol — a modern, JSON-based alternative to IMAP for email access. 
Complements the existing [ocaml-imap](https://tangled.org/@anil.recoil.org/ocaml-imap.git) library" Here is the changelog to refine: %s Output ONLY the refined markdown, no explanation or preamble.|}
      markdown
  in
  let options = Claude.Options.default |> Claude.Options.with_max_turns 1 in
  let client = Claude.Client.v ~sw ~process_mgr ~clock ~options () in
  Claude.Client.query client prompt;
  let responses = Claude.Client.receive_all client in
  let result = ref None in
  List.iter
    (function
      | Claude.Response.Complete c -> (
          match Claude.Response.Complete.result_text c with
          | Some text -> result := Some (Ok text)
          | None -> result := Some (Ok markdown) (* fallback to original *))
      | Claude.Response.Error e ->
          result :=
            Some
              (Error
                 (Fmt.str "Claude error: %s" (Claude.Response.Error.message e)))
      | _ -> ())
    responses;
  match !result with
  | Some r -> r
  | None -> Ok markdown (* fallback to original *)

(* Simple string containment check *)
(* Naive O(|haystack| * |needle|) scan; inputs here are short summaries. *)
let string_contains_s haystack needle =
  let hlen = String.length haystack in
  let nlen = String.length needle in
  if nlen > hlen then false
  else begin
    let rec check i =
      if i > hlen - nlen then false
      else if String.sub haystack i nlen = needle then true
      else check (i + 1)
    in
    check 0
  end

(* Infer change type from summary text *)
(* Keyword heuristics checked in priority order: new library, bugfix,
   refactor, documentation, feature, then Unknown. Note that [&&] binds
   tighter than [||], so the third new-library test reads
   (starts with "added") && (ends with "library"). *)
let infer_change_type summary =
  let summary_lower = String.lowercase_ascii summary in
  if
    String.starts_with ~prefix:"initial import" summary_lower
    || String.starts_with ~prefix:"added as subtree" summary_lower
    || String.starts_with ~prefix:"added" summary_lower
       && String.ends_with ~suffix:"library" summary_lower
  then Changes_aggregated.New_library
  else if
    List.exists
      (fun kw -> string_contains_s summary_lower kw)
      [ "fix"; "bugfix"; "bug fix"; "repair"; "patch"; "resolve"; "correct" ]
  then Changes_aggregated.Bugfix
  else if
    List.exists
      (fun kw -> string_contains_s summary_lower kw)
      [
        "refactor";
        "cleanup";
        "clean up";
        "reorganize";
        "restructure";
        "simplify";
      ]
  then Changes_aggregated.Refactor
  else if
    List.exists
      (fun kw -> string_contains_s summary_lower kw)
      [ "doc"; "documentation"; "readme"; "comment"; "tutorial"; "guide" ]
  then Changes_aggregated.Documentation
  else if
    List.exists
      (fun kw -> string_contains_s summary_lower kw)
      [ "add"; "new"; "feature"; "implement"; "support"; "introduce"; "enable" ]
  then Changes_aggregated.Feature
  else Changes_aggregated.Unknown

(* Load every (repo_name, entry) pair from the given per-repo daily files,
   dropping entries with no changes. The repo name is recovered by stripping
   the "-YYYY-MM-DD.json" suffix ([date_suffix_len] chars) from the
   filename. Unreadable or unparseable files contribute nothing. *)
let load_daily_entries_for_date changes_dir daily_files date_suffix_len =
  List.concat_map
    (fun filename ->
      let repo_name =
        String.sub filename 0 (String.length filename - date_suffix_len)
      in
      let path = Eio.Path.(changes_dir / filename) in
      try
        let content = Eio.Path.load path in
        match Json.of_string daily_file_jsont content with
        | Ok dcf ->
            List.filter_map
              (fun (e : daily_entry) ->
                if e.changes <> [] then Some (repo_name, e) else None)
              dcf.entries
        | Error _ -> []
      with Eio.Io _ -> [])
    daily_files

(* Convert a tagged daily entry into the aggregated-entry record, inferring
   its change type from the summary text. *)
let daily_entry_to_aggregated (repo_name, (e : daily_entry)) =
  let change_type = infer_change_type e.summary in
  Changes_aggregated.
    {
      repository = repo_name;
      hour = e.hour;
      timestamp = e.timestamp;
      summary = e.summary;
      changes = e.changes;
      commit_range =
        {
          from_hash = e.commit_range.from_hash;
          to_hash = e.commit_range.to_hash;
          count = e.commit_range.count;
        };
      contributors = e.contributors;
      repo_url = e.repo_url;
      change_type;
    }

(** Generate an aggregated daily file from individual daily json files. This
    creates a YYYYMMDD.json file in the .changes directory. *)
let generate_aggregated ~fs ~monorepo ~date ~git_head ~now =
  let changes_dir = Eio.Path.(fs / Fpath.to_string monorepo / ".changes") in
  let files = try Eio.Path.read_dir changes_dir with Eio.Io _ -> [] in
  let date_suffix = "-" ^ date ^ ".json" in
  let date_suffix_len = String.length date_suffix in
  (* The length check guards against a file named exactly "-DATE.json",
     which would leave an empty repo name. *)
  let daily_files =
    List.filter
      (fun f ->
        String.ends_with ~suffix:date_suffix f
        && String.length f > date_suffix_len)
      files
  in
  let entries =
    load_daily_entries_for_date changes_dir daily_files date_suffix_len
  in
  let agg_entries = List.map daily_entry_to_aggregated entries in
  (* De-duplicated, sorted union of contributors across all entries. *)
  let authors =
    entries
    |> List.concat_map (fun (_, (e : daily_entry)) -> e.contributors)
    |> List.sort_uniq String.compare
  in
  let aggregated : Changes_aggregated.t =
    { date; generated_at = now; git_head; entries = agg_entries; authors }
  in
  let changes_dir_fpath = Fpath.(v (Fpath.to_string monorepo) / ".changes") in
  Changes_aggregated.save ~fs ~changes_dir:changes_dir_fpath aggregated