iq based exporter, using cargo json data #275

Merged into main on Dec 13, 2024
iq based exporter, using cargo json data
Canop committed Dec 9, 2024
commit 3aff49d3d22dc4aeb0f5aa6f7c6d8d1feac17ee0
12 changes: 12 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -21,6 +21,7 @@ crokey = "1.1"
directories-next = "2.0.0"
gix = { version = "0.67", default-features = false, features = ["index", "excludes", "parallel"] }
glob = "0.3"
iq = { version = "0.2", features = ["template"] }
lazy-regex = "3.3"
notify = "7.0"
rustc-hash = "2"
@@ -42,4 +43,5 @@ codegen-units = 1
# termimad = { path = "../termimad" }
# crokey = { path = "../crokey" }
# coolor = { path = "../coolor" }
# iq = { path = "../iq" }
# lazy-regex = { path = "../lazy-regex" }
8 changes: 8 additions & 0 deletions bacon.toml
@@ -76,3 +76,11 @@ need_stdout = false

[keybindings]
c = "job:clippy-all"

# An example of an export built from the cargo JSON output, active when
# the analyzer is "cargo_json"
[exports.cargo-json-spans]
auto = false
exporter = "analyzer"
path = "bacon-analyser-export.json"
line_format = "{span.file_name} {span.line_start}-{span.line_end} | {diagnostic.message} [{diagnostic.code}] level={diagnostic.level}"
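Since `auto = false` here, this export would not run on each job completion; presumably it would be triggered on demand with a key binding, following the `export:` binding syntax mentioned in the default preferences, for example `ctrl-e = "export:cargo-json-spans"` (a hypothetical binding, not part of this diff).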
23 changes: 10 additions & 13 deletions defaults/default-prefs.toml
@@ -1,21 +1,25 @@
# This is a preferences file for the bacon tool
# More info at https://github.com/Canop/bacon


# Uncomment and change the value (true/false) to
# specify whether bacon should start in summary mode
#
# summary = true


# Uncomment and change the value (true/false) to
# specify whether bacon should start with lines wrapped
#
# wrap = false


# In "reverse" mode, the focus is at the bottom, item
# order is reversed, and the status bar is on top
#
# reverse = true


# The grace period is a delay after a file event before the real
# task is launched and during which other events will be ignored.
# This is useful if several events are often sent quasi-simultaneously
@@ -24,30 +28,27 @@
#
# grace_period = "5ms"


# Uncomment and change the value (true/false) to
# specify whether bacon should show a help line.
#
# help_line = false


# Uncomment and change the value (true/false) to
# set whether to display the count of changes since last job start
#
# show_changes_count = false


# Uncomment one of those lines if you don't want the default
# behavior triggered by a file change. This property can also
# be set directly in a specific job.
#
# on_change_strategy = "kill_then_restart"
# on_change_strategy = "wait_then_restart"

# Exports can be executed either
# - on each job completion (if auto is true)
# - or called on a key (eg `ctrl-e = "export:analysis"`)
#
# They can be used for communication with external tools
# or for inquiries.
#

# Exporting "locations" (by setting its 'auto' to true) lets you use
# them in an external tool, for example as a list of jump locations
# in an IDE or in a language server.
@@ -62,14 +63,10 @@
# - context: unstyled lines of output, separated with escaped newlines (`\\n`)
[exports.locations]
auto = false
exporter = "locations"
path = ".bacon-locations"
line_format = "{kind} {path}:{line}:{column} {message}"
[exports.json-report]
auto = false
path = "bacon-report.json"
[exports.analysis]
auto = false
path = "bacon-analysis.json"


# Uncomment and change the key-bindings you want to define
# (some of those ones are the defaults and are just here for illustration)
49 changes: 49 additions & 0 deletions src/analysis/cargo_json/cargo_json_export.rs
@@ -0,0 +1,49 @@
use {
crate::*,
cargo_metadata::diagnostic::{
Diagnostic,
DiagnosticSpan,
},
serde::Serialize,
};

/// An export in progress for the cargo_json analyzer
pub struct CargoJsonExport {
pub name: String,
/// The data to write to the export file
pub export: String,
pub line_template: iq::Template,
}

/// The data provided to the template, once per span
#[derive(Debug, Clone, Serialize)]
struct OnSpanData<'d> {
diagnostic: &'d Diagnostic,
span: &'d DiagnosticSpan,
}

impl CargoJsonExport {
pub fn new(
name: String,
settings: &ExportSettings,
) -> Self {
Self {
name,
export: String::new(),
line_template: iq::Template::new(&settings.line_format),
}
}
pub fn receive_diagnostic(
&mut self,
diagnostic: &Diagnostic,
) {
for span in &diagnostic.spans {
let data = OnSpanData { diagnostic, span };
let line = self.line_template.render(&data);
if !line.is_empty() {
self.export.push_str(&line);
self.export.push('\n');
}
}
}
}
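As a side note, here is a minimal sketch of how the template rendering above appears to work, based only on the `iq::Template::new` and `render` calls visible in this diff; the `DemoSpan`/`DemoData` types and the assumption that dotted placeholders such as `{span.file_name}` resolve against the serialized structure are illustrative, not part of the PR.

use serde::Serialize;

// Hypothetical data shapes standing in for cargo_metadata's
// Diagnostic and DiagnosticSpan
#[derive(Serialize)]
struct DemoSpan<'a> {
    file_name: &'a str,
    line_start: usize,
    line_end: usize,
}

#[derive(Serialize)]
struct DemoData<'a> {
    message: &'a str,
    span: DemoSpan<'a>,
}

fn main() {
    // Same two calls as in CargoJsonExport above: build a template from a
    // format string, then render it against any serde::Serialize value.
    let line_format = String::from("{span.file_name} {span.line_start}-{span.line_end} | {message}");
    let template = iq::Template::new(&line_format);
    let line = template.render(&DemoData {
        message: "unused variable",
        span: DemoSpan {
            file_name: "src/main.rs",
            line_start: 10,
            line_end: 10,
        },
    });
    // Expected to print something like: src/main.rs 10-10 | unused variable
    println!("{line}");
}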
24 changes: 21 additions & 3 deletions src/analysis/cargo_json/mod.rs
@@ -1,10 +1,13 @@
mod cargo_json_export;

use {
super::*,
crate::{
analysis::standard,
*,
},
anyhow::Result,
cargo_json_export::*,
cargo_metadata::{
Message,
diagnostic::Diagnostic,
@@ -21,17 +24,25 @@ use {
///
/// There are so many problems with this approach, though, that I'm
/// not sure this is worth it.
#[derive(Debug, Default)]
#[derive(Default)]
pub struct CargoJsonAnalyzer {
lines: Vec<CommandOutputLine>,
exports: Vec<CargoJsonExport>,
}

impl Analyzer for CargoJsonAnalyzer {
fn start(
&mut self,
_mission: &Mission,
mission: &Mission,
) {
self.lines.clear();
self.exports.clear();
for (name, export_settings) in &mission.settings.exports.exports {
if export_settings.exporter == Exporter::Analyser {
let export = CargoJsonExport::new(name.clone(), export_settings);
self.exports.push(export);
}
}
}

fn receive_line(
@@ -59,7 +70,11 @@ impl Analyzer for CargoJsonAnalyzer {

fn build_report(&mut self) -> Result<Report> {
let line_analyzer = standard::StandardLineAnalyzer {};
standard::build_report(&self.lines, line_analyzer)
let mut report = standard::build_report(&self.lines, line_analyzer)?;
for export in self.exports.drain(..) {
report.analyzer_exports.insert(export.name, export.export);
}
Ok(report)
}
}

@@ -89,6 +104,9 @@ impl CargoJsonAnalyzer {
origin: CommandStream,
command_output: &mut CommandOutput,
) {
for export in &mut self.exports {
export.receive_diagnostic(&diagnostic);
}
let Diagnostic {
//message,
//code,
1 change: 1 addition & 0 deletions src/analysis/item_accumulator.rs
@@ -76,6 +76,7 @@ impl ItemAccumulator {
suggest_backtrace: false,
output: Default::default(),
failure_keys: Vec::new(),
analyzer_exports: Default::default(),
}
}
}
3 changes: 3 additions & 0 deletions src/analysis/standard/standard_line_analyser.rs
@@ -91,6 +91,9 @@ fn analyze_line(cmd_line: &CommandOutputLine) -> LineAnalysis {
("", title_raw, CSI_BOLD_BLUE, "--> ") if is_spaces(title_raw) => {
LineType::Location
}
("", title_raw, CSI_BOLD_BLUE, "::: ") if is_spaces(title_raw) => {
LineType::Location
}
("", k, CSI_BOLD_RED | CSI_RED, "FAILED") if content.strings.len() == 2 => {
if let Some(k) = as_test_name(k) {
key = Some(k.to_string());
1 change: 1 addition & 0 deletions src/analysis/standard/standard_report_building.rs
@@ -148,6 +148,7 @@ pub fn build_report<L: LineAnalyzer>(
suggest_backtrace,
output: Default::default(),
failure_keys,
analyzer_exports: Default::default(),
};
Ok(report)
}
20 changes: 12 additions & 8 deletions src/export/export_settings.rs
@@ -17,6 +17,7 @@ pub struct ExportSettings {
impl ExportSettings {
pub fn do_export(
&self,
name: &str,
state: &AppState<'_>,
) -> anyhow::Result<()> {
let path = if self.path.is_relative() {
@@ -25,23 +26,26 @@ impl ExportSettings {
self.path.to_path_buf()
};
info!("exporting to {:?}", path);
let Some(report) = state.cmd_result.report() else {
info!("No report to export");
return Ok(());
};
match self.exporter {
Exporter::Analyser => {
if let Some(export) = report.analyzer_exports.get(name) {
std::fs::write(&path, export)?;
} else {
info!("Analyzer didn't build export {:?}", name);
}
}
Exporter::Analysis => {
error!("Aanlysis export not currently implemented");
}
Exporter::JsonReport => {
let Some(report) = state.cmd_result.report() else {
info!("No report to export");
return Ok(());
};
let json = serde_json::to_string_pretty(&report)?;
std::fs::write(&path, json)?;
}
Exporter::Locations => {
let Some(report) = state.cmd_result.report() else {
info!("No report to export");
return Ok(());
};
let mut file = File::create(path)?;
report.write_locations(&mut file, &state.mission, &self.line_format)?;
}
9 changes: 8 additions & 1 deletion src/export/exporter.rs
@@ -1,7 +1,14 @@
use serde::Deserialize;

#[derive(Debug, Clone, Copy, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Exporter {
/// The analyzer is tasked with doing an export while analyzing the
/// command output
#[serde(alias = "analyzer")]
Analyser,
/// This exporter doesn't exist at the moment
#[serde(alias = "analyzis")]
Analysis,
JsonReport,
Locations,
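Note that with `#[serde(rename_all = "snake_case")]` plus the aliases above, both `exporter = "analyser"` and `exporter = "analyzer"` in an export section should deserialize to `Exporter::Analyser`, which is why the bacon.toml example earlier in this diff can use `exporter = "analyzer"`.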
8 changes: 6 additions & 2 deletions src/export/exports_settings.rs
@@ -31,7 +31,7 @@ impl ExportsSettings {
for (name, export) in &self.exports {
if export.auto {
info!("doing auto export {:?}", name);
if let Err(e) = export.do_export(state) {
if let Err(e) = export.do_export(name, state) {
error!("error while exporting {:?}: {:?}", name, e);
}
}
@@ -44,7 +44,7 @@
state: &AppState<'_>,
) {
if let Some(export) = self.exports.get(requested_name) {
if let Err(e) = export.do_export(state) {
if let Err(e) = export.do_export(requested_name, state) {
error!("error while exporting {:?}: {:?}", requested_name, e);
}
} else {
@@ -99,6 +99,7 @@ impl ExportsSettings {
};
let auto = ec.auto.unwrap_or(true);
let path = ec.path.clone().unwrap_or_else(|| match exporter {
Exporter::Analyser => default_analyser_path(),
Exporter::Analysis => default_analysis_path(),
Exporter::Locations => default_locations_path(),
Exporter::JsonReport => default_json_report_path(),
@@ -201,6 +202,9 @@ pub fn default_locations_line_format() -> &'static str {
"{kind} {path}:{line}:{column} {message}"
}

pub fn default_analyser_path() -> PathBuf {
PathBuf::from("bacon-analyser.json")
}
pub fn default_analysis_path() -> PathBuf {
PathBuf::from("bacon-analysis.json")
}
3 changes: 3 additions & 0 deletions src/result/report.rs
@@ -7,6 +7,7 @@ use {
Serialize,
},
std::{
collections::HashMap,
io,
path::PathBuf,
},
@@ -21,6 +22,8 @@ pub struct Report {
pub suggest_backtrace: bool,
pub output: CommandOutput,
pub failure_keys: Vec<String>,
/// The exports built by the analyzers, keyed by export name
pub analyzer_exports: HashMap<String, String>,
}

impl Report {