28 changes: 28 additions & 0 deletions README.md
@@ -325,6 +325,8 @@ sources:
...
# Target specifier. Optional.
target: <target specifier>
# Optional setting to override other files in any source that have the same file basename.
override_files: true
# Recursive list of source files and groups:
files:
- <file or group 1>
@@ -384,6 +386,32 @@ Do not start the target name with `-`, as this is used to remove target applicat

[Relevant code](https://github.com/pulp-platform/bender/blob/master/src/target.rs)


### Override Files
If the `override_files` setting is applied to a source, any file in that source overrides other files that share the same basename. The overridden file is removed from the output and replaced with the overriding file. For example, if `override_files` is applied to a source containing `src/core/pkg.sv`, any other file named `pkg.sv` under a different path is removed and replaced with `src/core/pkg.sv`. If a file in an `override_files` source does not override any other file, it is not present in the output at all.


#### Example:
```yaml
sources:
  - files:
      - src/core/pkg.sv
      - src/core/alu.sv
      - src/core/top.sv
  - target: custom_pkg
    override_files: true
    files:
      - src/custom/pkg.sv
      - src/custom/adder.sv
```
If Bender is run with the `custom_pkg` target, the output files will be:

```
src/custom/pkg.sv
src/core/alu.sv
src/core/top.sv
```
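
For instance, a hypothetical invocation that emits such a flat file list (using the `flist` script format as one example) would be:

```sh
bender script flist -t custom_pkg
```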

### Vendor

Section to list files and directories copied and patched within this repository from external repositories not supporting bender.
93 changes: 84 additions & 9 deletions src/cmd/script.rs
@@ -4,7 +4,7 @@
//! The `script` subcommand.

use std::io::Write;
use std::path::PathBuf;
use std::path::{Path, PathBuf};

use clap::{ArgAction, Args, Subcommand, ValueEnum};
use indexmap::{IndexMap, IndexSet};
@@ -35,7 +35,7 @@ pub struct ScriptArgs {
pub define: Vec<String>,

/// Remove source annotations from the generated script
#[arg(long, help_heading = "General Script Options")]
#[arg(long, global = true, help_heading = "General Script Options")]
pub no_source_annotations: bool,

/// Specify package to show sources for
@@ -388,7 +388,7 @@ pub fn run(sess: &Session, args: &ScriptArgs) -> Result<()> {

/// Subdivide the source files in a group.
///
/// The function `cateogrize` is used to assign a category to each source file.
/// The function `categorize` is used to assign a category to each source file.
/// Files with the same category that appear after each other will be kept in
the same source group. Files with different categories are split into
/// separate groups.
@@ -455,17 +455,27 @@ fn emit_template(
let mut all_files = IndexSet::new();
let mut all_verilog = vec![];
let mut all_vhdl = vec![];
let mut all_override_files: IndexSet<(&Path, &str)> = IndexSet::new();
for src in &srcs {
all_defines.extend(
src.defines
.iter()
.map(|(k, &v)| (k.to_string(), v.map(String::from))),
);
all_incdirs.append(&mut src.clone().get_incdirs());
all_files.extend(src.files.iter().filter_map(|file| match file {
SourceFile::File(p, _) => Some(p.to_string_lossy().to_string()),
SourceFile::Group(_) => None,
}));

// If override_files is set, the source's files are not included directly; they are only used to replace files with matching basenames.
if src.override_files {
all_override_files.extend(src.files.iter().filter_map(|file| match file {
SourceFile::File(p, _) => Some((*p, src.package.unwrap_or("None"))),
SourceFile::Group(_) => None,
}));
} else {
all_files.extend(src.files.iter().filter_map(|file| match file {
SourceFile::File(p, _) => Some((*p, None::<String>)),
SourceFile::Group(_) => None,
}));
}
}

add_defines(&mut all_defines, &args.define);
@@ -485,12 +495,52 @@
};
tera_context.insert("all_incdirs", &all_incdirs);

// replace files in all_files with override files
let override_map = all_override_files
.iter()
.map(|(f, pkg)| {
(
f.file_name()
.and_then(std::ffi::OsStr::to_str)
.unwrap_or(""),
(*f, pkg),
)
})
.collect::<IndexMap<_, _>>();
let all_files = all_files
.into_iter()
.map(|file| {
let basename = file
.0
.file_name()
.and_then(std::ffi::OsStr::to_str)
.unwrap_or("");
match override_map.get(&basename) {
Some((new_path, pkg)) => FileEntry {
file: new_path.to_path_buf(),
comment: Some(format!(
"OVERRIDDEN from {}: {}",
pkg,
file.0.to_string_lossy()
)),
},
None => FileEntry {
file: file.0.to_path_buf(),
comment: file.1,
},
}
})
.collect::<IndexSet<_>>();

if emit_sources {
tera_context.insert("all_files", &all_files);
}

let mut split_srcs = vec![];
for src in srcs {
if src.override_files {
continue;
}
separate_files_in_group(
src,
|f| match f {
@@ -536,7 +586,26 @@ fn emit_template(
files: files
.iter()
.map(|f| match f {
SourceFile::File(p, _) => p.to_path_buf(),
SourceFile::File(p, _) => {
let basename = p
.file_name()
.and_then(std::ffi::OsStr::to_str)
.unwrap_or("");
match override_map.get(&basename) {
Some((new_path, pkg)) => FileEntry {
file: new_path.to_path_buf(),
comment: Some(format!(
"OVERRIDDEN from {}: {}",
pkg,
p.to_string_lossy()
)),
},
None => FileEntry {
file: p.to_path_buf(),
comment: None,
},
}
}
SourceFile::Group(_) => unreachable!(),
})
.collect(),
@@ -595,11 +664,17 @@ fn emit_template(
Ok(())
}

#[derive(Debug, Serialize, Hash, Eq, PartialEq, Clone)]
struct FileEntry {
file: PathBuf,
comment: Option<String>,
}

#[derive(Debug, Serialize)]
struct TplSrcStruct {
metadata: String,
defines: IndexSet<(String, Option<String>)>,
incdirs: IndexSet<PathBuf>,
files: IndexSet<PathBuf>,
files: IndexSet<FileEntry>,
file_type: String,
}
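
As a side note, the replacement step introduced above can be sketched in isolation. The snippet below is a minimal, self-contained illustration of the basename-keyed override map; the function and variable names (e.g. `apply_overrides`) are illustrative only and not part of Bender's code:

```rust
use indexmap::{IndexMap, IndexSet};
use std::path::{Path, PathBuf};

/// Sketch: files from `override_files` sources are keyed by their basename,
/// and any regular file with a matching basename is swapped for the override.
fn apply_overrides(regular: &[&Path], overrides: &[&Path]) -> IndexSet<PathBuf> {
    // Key each override file by its basename.
    let override_map: IndexMap<&str, &Path> = overrides
        .iter()
        .filter_map(|p| p.file_name().and_then(|n| n.to_str()).map(|n| (n, *p)))
        .collect();
    regular
        .iter()
        .map(|p| {
            let basename = p.file_name().and_then(|n| n.to_str()).unwrap_or("");
            // Substitute the overriding file in place; otherwise keep the original.
            match override_map.get(basename) {
                Some(new_path) => new_path.to_path_buf(),
                None => p.to_path_buf(),
            }
        })
        .collect()
}
```

For example, with `src/core/pkg.sv` and `src/core/alu.sv` as regular files and `src/custom/pkg.sv` as the only override, the result keeps `alu.sv` untouched and substitutes `src/custom/pkg.sv` for the core `pkg.sv`, preserving the original file order.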
22 changes: 19 additions & 3 deletions src/config.rs
@@ -233,6 +233,8 @@ pub struct Sources {
pub defines: IndexMap<String, Option<String>>,
/// The source files.
pub files: Vec<SourceFile>,
/// The files in this source will override other files.
pub override_files: bool,
}

impl PrefixPaths for Sources {
@@ -617,6 +619,7 @@ impl Validate for PartialManifest {
include_dirs: Vec::new(),
defines: IndexMap::new(),
files: vec![srcs.unwrap()],
override_files: false,
}),
None => None,
},
@@ -869,6 +872,8 @@ pub struct PartialSources {
pub vhd: Option<String>,
/// The list of external flists to include.
pub external_flists: Option<Vec<String>>,
/// The files in this source will override other files.
pub override_files: Option<bool>,
/// Unknown extra fields
#[serde(flatten)]
extra: HashMap<String, Value>,
@@ -922,6 +927,7 @@ impl Validate for PartialSources {
v: None,
vhd: None,
external_flists: None,
override_files: None,
extra: _,
} => PartialSourceFile::SvFile(sv).validate(vctx),
PartialSources {
@@ -933,6 +939,7 @@ impl Validate for PartialSources {
v: Some(v),
vhd: None,
external_flists: None,
override_files: None,
extra: _,
} => PartialSourceFile::VerilogFile(v).validate(vctx),
PartialSources {
@@ -944,6 +951,7 @@ impl Validate for PartialSources {
v: None,
vhd: Some(vhd),
external_flists: None,
override_files: None,
extra: _,
} => PartialSourceFile::VhdlFile(vhd).validate(vctx),
PartialSources {
@@ -955,6 +963,7 @@ impl Validate for PartialSources {
v: None,
vhd: None,
external_flists,
override_files,
extra,
} => {
let external_flists: Result<Vec<_>> = external_flists
@@ -1153,7 +1162,7 @@ impl Validate for PartialSources {
.flatten()
.collect::<Vec<_>>();

let include_dirs: Result<Vec<_>> = include_dirs
let include_dirs = include_dirs
.unwrap_or_default()
.iter()
.filter_map(|path| match env_path_from_string(path.to_string()) {
@@ -1170,7 +1179,7 @@ impl Validate for PartialSources {
}
}
})
.collect();
.collect::<Result<Vec<_>>>()?;

let defines = defines.unwrap_or_default();
let files: Result<Vec<_>> = post_glob_files
Expand All @@ -1191,11 +1200,17 @@ impl Validate for PartialSources {
.emit();
});
}
if override_files.is_some_and(|x| x)
&& (!include_dirs.is_empty() || !defines.is_empty())
{
Warnings::OverrideFilesWithExtras(vctx.package_name.to_string()).emit();
}
Ok(SourceFile::Group(Box::new(Sources {
target: target.unwrap_or_default(),
include_dirs: include_dirs?,
include_dirs,
defines,
files,
override_files: override_files.is_some_and(|x| x),
})))
}
PartialSources {
Expand All @@ -1207,6 +1222,7 @@ impl Validate for PartialSources {
v: _v,
vhd: _vhd,
external_flists: None,
override_files: None,
extra: _,
} => Err(Error::new(
"Only a single source with a single type is supported.",
4 changes: 4 additions & 0 deletions src/diagnostic.rs
@@ -361,6 +361,10 @@ pub enum Warnings {
)]
LfsDisabled(String),

#[error("Override files in {} does not support additional fields like include_dirs, defines, etc.", fmt_pkg!(.0))]
#[diagnostic(code(W28))]
OverrideFilesWithExtras(String),

#[error("File not added, ignoring: {cause}")]
#[diagnostic(code(W30))]
IgnoredPath { cause: String },
12 changes: 7 additions & 5 deletions src/script_fmt/flist-plus.tera
@@ -17,14 +17,16 @@
#}// {{ file_group.metadata }}
{% endif %}{#
#}{% for file in file_group.files %}{# loop over all files
#}{% if relativize_path %}{# make path relative if necessary
#}{% if file is starting_with(root) %}{# keep path unless it starts with common root
#}{{ file | replace(from=root, to='') | trim_start_matches(pat='/') }}
#}{% if source_annotations %}{% if file.comment %}{# add file-specific comment
#}// {{ file.comment }}
{% endif %}{% endif %}{% if relativize_path %}{# make path relative if necessary
#}{% if file.file is starting_with(root) %}{# keep path unless it starts with common root
#}{{ file.file | replace(from=root, to='') | trim_start_matches(pat='/') }}
{% else %}{#
#}{{ file }}
#}{{ file.file }}
{% endif %}{#
#}{% else %}{#
#}{{ file }}
#}{{ file.file }}
{% endif %}{#
#}{% endfor %}{#
#}{% endfor %}
18 changes: 10 additions & 8 deletions src/script_fmt/flist.tera
@@ -1,16 +1,18 @@
{% for file_group in srcs %}{# loop over all file groups
#}{% if source_annotations %}{# Add source annotations
{% for file_group in srcs %}{# loop over all file groups
#}{% if source_annotations %}{# Add source annotations
#}// {{ file_group.metadata }}
{% endif %}{#
#}{% for file in file_group.files %}{# loop over all files
#}{% if relativize_path %}{# make path relative if necessary
#}{% if file is starting_with(root) %}{# keep path unless it starts with common root
#}{{ file | replace(from=root, to='') | trim_start_matches(pat='/') }}
#}{% for file in file_group.files %}{# loop over all files
#}{% if source_annotations %}{% if file.comment %}{# add file-specific comment
#}// {{ file.comment }}
{% endif %}{% endif %}{% if relativize_path %}{# make path relative if necessary
#}{% if file.file is starting_with(root) %}{# keep path unless it starts with common root
#}{{ file.file | replace(from=root, to='') | trim_start_matches(pat='/') }}
{% else %}{#
#}{{ file }}
#}{{ file.file }}
{% endif %}{#
#}{% else %}{#
#}{{ file }}
#}{{ file.file }}
{% endif %}{#
#}{% endfor %}{#
#}{% endfor %}
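
For illustration only: assuming the README example above, the `custom_pkg` target, and source annotations enabled, the plain flist output might look roughly like this (group metadata comment lines omitted; `example_pkg` is a placeholder package name):

```
// OVERRIDDEN from example_pkg: src/core/pkg.sv
src/custom/pkg.sv
src/core/alu.sv
src/core/top.sv
```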
9 changes: 6 additions & 3 deletions src/script_fmt/formality_tcl.tera
@@ -12,7 +12,8 @@ set search_path $search_path_initial
} \
{% else %} \
{% endif %}{% endfor %}[list \
{% for file in group.files %}{{ ' ' }}"{{ file | replace(from=root, to='$ROOT') }}" \
{% for file in group.files %}{% if source_annotations %}{% if file.comment %}{{ ' ' }}# {{ file.comment }}
{% endif %}{% endif %}{{ ' ' }}"{{ file.file | replace(from=root, to='$ROOT') }}" \
{% endfor %}]
{% if abort_on_error %}}]} {return 1}{% endif %}
{% endfor %}
@@ -25,13 +26,15 @@ set search_path $search_path_initial
} \
{% else %} \
{% endif %}{% endfor %}[list \
{% endif %}{{ ' ' }}"{{ file | replace(from=root, to='$ROOT') }}" \
{% endif %}{% if source_annotations %}{% if file.comment %}{{ ' ' }}# {{ file.comment }}
{% endif %}{% endif %}{{ ' ' }}"{{ file.file | replace(from=root, to='$ROOT') }}" \
{% if loop.last %}]
{% if abort_on_error %}}]} {return 1}{% endif %}
{% endif %}{% endfor %}
{% for file in all_vhdl %}{% if loop.first %}{% if abort_on_error %}if {[catch { {% endif %}read_vhdl -r \
[list \
{% endif %}{{ ' ' }}"{{ file | replace(from=root, to='$ROOT') }}" \
{% endif %}{% if source_annotations %}{% if file.comment %}{{ ' ' }}# {{ file.comment }}
{% endif %}{% endif %}{{ ' ' }}"{{ file.file | replace(from=root, to='$ROOT') }}" \
{% if loop.last %}]
{% if abort_on_error %}}]} {return 1}{% endif %}
{% endif %}{% endfor %}