Diffstat (limited to 'src/tools/rust-analyzer/xtask')
-rw-r--r--  src/tools/rust-analyzer/xtask/Cargo.toml                  1
-rw-r--r--  src/tools/rust-analyzer/xtask/src/dist.rs                52
-rw-r--r--  src/tools/rust-analyzer/xtask/src/flags.rs               15
-rw-r--r--  src/tools/rust-analyzer/xtask/src/install.rs              2
-rw-r--r--  src/tools/rust-analyzer/xtask/src/main.rs                 2
-rw-r--r--  src/tools/rust-analyzer/xtask/src/metrics.rs              2
-rw-r--r--  src/tools/rust-analyzer/xtask/src/publish.rs            109
-rw-r--r--  src/tools/rust-analyzer/xtask/src/publish/notes.rs      631
-rw-r--r--  src/tools/rust-analyzer/xtask/src/release.rs              4
-rw-r--r--  src/tools/rust-analyzer/xtask/src/release/changelog.rs   30
-rw-r--r--  src/tools/rust-analyzer/xtask/test_data/expected.md      81
-rw-r--r--  src/tools/rust-analyzer/xtask/test_data/input.adoc       90
12 files changed, 991 insertions, 28 deletions
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index 95e27beab..2dd01796c 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -12,4 +12,5 @@ flate2 = "1.0.24"
write-json = "0.1.2"
xshell = "0.2.2"
xflags = "0.3.0"
+zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
# Avoid adding more dependencies to this crate
diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs
index 686aec4ae..74715c53e 100644
--- a/src/tools/rust-analyzer/xtask/src/dist.rs
+++ b/src/tools/rust-analyzer/xtask/src/dist.rs
@@ -1,12 +1,13 @@
use std::{
env,
fs::File,
- io,
+ io::{self, BufWriter},
path::{Path, PathBuf},
};
use flate2::{write::GzEncoder, Compression};
use xshell::{cmd, Shell};
+use zip::{write::FileOptions, DateTime, ZipWriter};
use crate::{date_iso, flags, project_root};
@@ -26,10 +27,10 @@ impl flags::Dist {
if let Some(patch_version) = self.client_patch_version {
let version = if stable {
- format!("{}.{}", VERSION_STABLE, patch_version)
+ format!("{VERSION_STABLE}.{patch_version}")
} else {
// A hack to make VS Code prefer nightly over stable.
- format!("{}.{}", VERSION_NIGHTLY, patch_version)
+ format!("{VERSION_NIGHTLY}.{patch_version}")
};
dist_server(sh, &format!("{version}-standalone"), &target)?;
let release_tag = if stable { date_iso(sh)? } else { "nightly".to_string() };
@@ -59,10 +60,10 @@ fn dist_client(
let mut patch = Patch::new(sh, "./package.json")?;
patch
.replace(
- &format!(r#""version": "{}.0-dev""#, VERSION_DEV),
- &format!(r#""version": "{}""#, version),
+ &format!(r#""version": "{VERSION_DEV}.0-dev""#),
+ &format!(r#""version": "{version}""#),
)
- .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag))
+ .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{release_tag}""#))
.replace(r#""$generated-start": {},"#, "")
.replace(",\n \"$generated-end\": {}", "")
.replace(r#""enabledApiProposals": [],"#, r#""#);
@@ -89,6 +90,9 @@ fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()>
let dst = Path::new("dist").join(&target.artifact_name);
gzip(&target.server_path, &dst.with_extension("gz"))?;
+ if target_name.contains("-windows-") {
+ zip(&target.server_path, target.symbols_path.as_ref(), &dst.with_extension("zip"))?;
+ }
Ok(())
}
@@ -101,6 +105,38 @@ fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> {
Ok(())
}
+fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> anyhow::Result<()> {
+ let file = File::create(dest_path)?;
+ let mut writer = ZipWriter::new(BufWriter::new(file));
+ writer.start_file(
+ src_path.file_name().unwrap().to_str().unwrap(),
+ FileOptions::default()
+ .last_modified_time(
+ DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(),
+ )
+ .unix_permissions(0o755)
+ .compression_method(zip::CompressionMethod::Deflated)
+ .compression_level(Some(9)),
+ )?;
+ let mut input = io::BufReader::new(File::open(src_path)?);
+ io::copy(&mut input, &mut writer)?;
+ if let Some(symbols_path) = symbols_path {
+ writer.start_file(
+ symbols_path.file_name().unwrap().to_str().unwrap(),
+ FileOptions::default()
+ .last_modified_time(
+ DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(),
+ )
+ .compression_method(zip::CompressionMethod::Deflated)
+ .compression_level(Some(9)),
+ )?;
+ let mut input = io::BufReader::new(File::open(symbols_path)?);
+ io::copy(&mut input, &mut writer)?;
+ }
+ writer.finish()?;
+ Ok(())
+}
+
struct Target {
name: String,
server_path: PathBuf,
@@ -130,8 +166,8 @@ impl Target {
} else {
(String::new(), None)
};
- let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix));
- let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix);
+ let server_path = out_path.join(format!("rust-analyzer{exe_suffix}"));
+ let artifact_name = format!("rust-analyzer-{name}{exe_suffix}");
Self { name, server_path, symbols_path, artifact_name }
}
}
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
index 0fce48898..210047970 100644
--- a/src/tools/rust-analyzer/xtask/src/flags.rs
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -34,6 +34,13 @@ xflags::xflags! {
cmd dist {
optional --client-patch-version version: String
}
+ /// Read a changelog AsciiDoc file and update the GitHub Releases entry in Markdown.
+ cmd publish-release-notes {
+ /// Only run conversion and show the result.
+ optional --dry-run
+ /// Target changelog file.
+ required changelog: String
+ }
cmd metrics {
optional --dry-run
}
@@ -59,6 +66,7 @@ pub enum XtaskCmd {
Release(Release),
Promote(Promote),
Dist(Dist),
+ PublishReleaseNotes(PublishReleaseNotes),
Metrics(Metrics),
Bb(Bb),
}
@@ -91,6 +99,13 @@ pub struct Dist {
}
#[derive(Debug)]
+pub struct PublishReleaseNotes {
+ pub changelog: String,
+
+ pub dry_run: bool,
+}
+
+#[derive(Debug)]
pub struct Metrics {
pub dry_run: bool,
}
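
Note (not part of the diff): with these flags the new subcommand is invoked as `cargo xtask publish-release-notes [--dry-run] <changelog>`, assuming the repository's usual `cargo xtask` alias (otherwise `cargo run -p xtask -- publish-release-notes ...`). With `--dry-run` the converted Markdown is only printed instead of being pushed to the GitHub release.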
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
index ae978d551..83223a551 100644
--- a/src/tools/rust-analyzer/xtask/src/install.rs
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -62,7 +62,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> {
let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
paths.append(&mut vscode_path);
let new_paths = env::join_paths(paths).context("build env PATH")?;
- sh.set_var("PATH", &new_paths);
+ sh.set_var("PATH", new_paths);
}
Ok(())
diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs
index a37f469ad..6a45033ad 100644
--- a/src/tools/rust-analyzer/xtask/src/main.rs
+++ b/src/tools/rust-analyzer/xtask/src/main.rs
@@ -15,6 +15,7 @@ mod flags;
mod install;
mod release;
mod dist;
+mod publish;
mod metrics;
use anyhow::bail;
@@ -36,6 +37,7 @@ fn main() -> anyhow::Result<()> {
flags::XtaskCmd::Release(cmd) => cmd.run(sh),
flags::XtaskCmd::Promote(cmd) => cmd.run(sh),
flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
+ flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh),
flags::XtaskCmd::Metrics(cmd) => cmd.run(sh),
flags::XtaskCmd::Bb(cmd) => {
{
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
index ebeb87346..b6f730dbf 100644
--- a/src/tools/rust-analyzer/xtask/src/metrics.rs
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -87,7 +87,7 @@ impl Metrics {
self.measure_analysis_stats_path(
sh,
bench,
- &format!("./target/rustc-perf/collector/benchmarks/{}", bench),
+ &format!("./target/rustc-perf/collector/benchmarks/{bench}"),
)
}
fn measure_analysis_stats_path(
diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs
new file mode 100644
index 000000000..79b5f3d2f
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/publish.rs
@@ -0,0 +1,109 @@
+mod notes;
+
+use crate::flags;
+use anyhow::{anyhow, bail, Result};
+use std::env;
+use xshell::{cmd, Shell};
+
+impl flags::PublishReleaseNotes {
+ pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ let asciidoc = sh.read_file(&self.changelog)?;
+ let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?;
+ let file_name = check_file_name(self.changelog)?;
+ let tag_name = &file_name[0..10];
+ let original_changelog_url = create_original_changelog_url(&file_name);
+ let additional_paragraph =
+ format!("\nSee also [original changelog]({original_changelog_url}).");
+ markdown.push_str(&additional_paragraph);
+ if self.dry_run {
+ println!("{markdown}");
+ } else {
+ update_release(sh, tag_name, &markdown)?;
+ }
+ Ok(())
+ }
+}
+
+fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> Result<String> {
+ let file_name = path
+ .as_ref()
+ .file_name()
+ .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))?
+ .to_string_lossy();
+
+ let mut chars = file_name.chars();
+ if file_name.len() >= 10
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap() == '-'
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap() == '-'
+ && chars.next().unwrap().is_ascii_digit()
+ && chars.next().unwrap().is_ascii_digit()
+ {
+ Ok(file_name.to_string())
+ } else {
+ bail!("unexpected file name format; no date information prefixed")
+ }
+}
+
+fn create_original_changelog_url(file_name: &str) -> String {
+ let year = &file_name[0..4];
+ let month = &file_name[5..7];
+ let day = &file_name[8..10];
+ let mut stem = &file_name[11..];
+ if let Some(stripped) = stem.strip_suffix(".adoc") {
+ stem = stripped;
+ }
+ format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html")
+}
+
+fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> {
+ let token = match env::var("GITHUB_TOKEN") {
+ Ok(token) => token,
+ Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
+ };
+ let accept = "Accept: application/vnd.github+json";
+ let authorization = format!("Authorization: Bearer {token}");
+ let api_version = "X-GitHub-Api-Version: 2022-11-28";
+ let release_url = "https://api.github.com/repos/rust-lang/rust-analyzer/releases";
+
+ let release_json = cmd!(
+ sh,
+ "curl -sf -H {accept} -H {authorization} -H {api_version} {release_url}/tags/{tag_name}"
+ )
+ .read()?;
+ let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?;
+
+ let mut patch = String::new();
+ write_json::object(&mut patch)
+ .string("tag_name", tag_name)
+ .string("target_commitish", "master")
+ .string("name", tag_name)
+ .string("body", release_notes)
+ .bool("draft", false)
+ .bool("prerelease", false);
+ let _ = cmd!(
+ sh,
+ "curl -sf -X PATCH -H {accept} -H {authorization} -H {api_version} {release_url}/{release_id} -d {patch}"
+ )
+ .read()?;
+
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn original_changelog_url_creation() {
+ let input = "2019-07-24-changelog-0.adoc";
+ let actual = create_original_changelog_url(input);
+ let expected = "https://rust-analyzer.github.io/thisweek/2019/07/24/changelog-0.html";
+ assert_eq!(actual, expected);
+ }
+}
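
Note (not part of the diff): a minimal sketch of the file-name-to-URL mapping implemented above, using a hypothetical changelog name; it mirrors the existing test and could sit alongside it:

    // Sketch only: the 10-character date prefix becomes the GitHub release tag,
    // and the rest of the stem becomes the page on the "thisweek" site.
    let file_name = "2022-01-01-changelog-100.adoc"; // hypothetical name
    let tag_name = &file_name[0..10]; // "2022-01-01"
    let url = create_original_changelog_url(file_name);
    assert_eq!(url, "https://rust-analyzer.github.io/thisweek/2022/01/01/changelog-100.html");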
diff --git a/src/tools/rust-analyzer/xtask/src/publish/notes.rs b/src/tools/rust-analyzer/xtask/src/publish/notes.rs
new file mode 100644
index 000000000..c30267295
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/publish/notes.rs
@@ -0,0 +1,631 @@
+use anyhow::{anyhow, bail};
+use std::{
+ borrow::Cow,
+ io::{BufRead, Lines},
+ iter::Peekable,
+};
+
+const LISTING_DELIMITER: &str = "----";
+const IMAGE_BLOCK_PREFIX: &str = "image::";
+const VIDEO_BLOCK_PREFIX: &str = "video::";
+
+struct Converter<'a, 'b, R: BufRead> {
+ iter: &'a mut Peekable<Lines<R>>,
+ output: &'b mut String,
+}
+
+impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> {
+ fn new(iter: &'a mut Peekable<Lines<R>>, output: &'b mut String) -> Self {
+ Self { iter, output }
+ }
+
+ fn process(&mut self) -> anyhow::Result<()> {
+ self.process_document_header()?;
+ self.skip_blank_lines()?;
+ self.output.push('\n');
+
+ loop {
+ let line = self.iter.peek().unwrap().as_deref().map_err(|e| anyhow!("{e}"))?;
+ if get_title(line).is_some() {
+ let line = self.iter.next().unwrap().unwrap();
+ let (level, title) = get_title(&line).unwrap();
+ self.write_title(level, title);
+ } else if get_list_item(line).is_some() {
+ self.process_list()?;
+ } else if line.starts_with('[') {
+ self.process_source_code_block(0)?;
+ } else if line.starts_with(LISTING_DELIMITER) {
+ self.process_listing_block(None, 0)?;
+ } else if line.starts_with('.') {
+ self.process_block_with_title(0)?;
+ } else if line.starts_with(IMAGE_BLOCK_PREFIX) {
+ self.process_image_block(None, 0)?;
+ } else if line.starts_with(VIDEO_BLOCK_PREFIX) {
+ self.process_video_block(None, 0)?;
+ } else {
+ self.process_paragraph(0, |line| line.is_empty())?;
+ }
+
+ self.skip_blank_lines()?;
+ if self.iter.peek().is_none() {
+ break;
+ }
+ self.output.push('\n');
+ }
+ Ok(())
+ }
+
+ fn process_document_header(&mut self) -> anyhow::Result<()> {
+ self.process_document_title()?;
+
+ while let Some(line) = self.iter.next() {
+ let line = line?;
+ if line.is_empty() {
+ break;
+ }
+ if !line.starts_with(':') {
+ self.write_line(&line, 0)
+ }
+ }
+
+ Ok(())
+ }
+
+ fn process_document_title(&mut self) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ if let Some((level, title)) = get_title(&line) {
+ let title = process_inline_macros(title)?;
+ if level == 1 {
+ self.write_title(level, &title);
+ return Ok(());
+ }
+ }
+ }
+ bail!("document title not found")
+ }
+
+ fn process_list(&mut self) -> anyhow::Result<()> {
+ let mut nesting = ListNesting::new();
+ while let Some(line) = self.iter.peek() {
+ let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
+
+ if get_list_item(line).is_some() {
+ let line = self.iter.next().unwrap()?;
+ let line = process_inline_macros(&line)?;
+ let (marker, item) = get_list_item(&line).unwrap();
+ nesting.set_current(marker);
+ self.write_list_item(item, &nesting);
+ self.process_paragraph(nesting.indent(), |line| {
+ line.is_empty() || get_list_item(line).is_some() || line == "+"
+ })?;
+ } else if line == "+" {
+ let _ = self.iter.next().unwrap()?;
+ let line = self
+ .iter
+ .peek()
+ .ok_or_else(|| anyhow!("list continuation unexpectedly terminated"))?;
+ let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
+
+ let indent = nesting.indent();
+ if line.starts_with('[') {
+ self.write_line("", 0);
+ self.process_source_code_block(indent)?;
+ } else if line.starts_with(LISTING_DELIMITER) {
+ self.write_line("", 0);
+ self.process_listing_block(None, indent)?;
+ } else if line.starts_with('.') {
+ self.write_line("", 0);
+ self.process_block_with_title(indent)?;
+ } else if line.starts_with(IMAGE_BLOCK_PREFIX) {
+ self.write_line("", 0);
+ self.process_image_block(None, indent)?;
+ } else if line.starts_with(VIDEO_BLOCK_PREFIX) {
+ self.write_line("", 0);
+ self.process_video_block(None, indent)?;
+ } else {
+ self.write_line("", 0);
+ let current = nesting.current().unwrap();
+ self.process_paragraph(indent, |line| {
+ line.is_empty()
+ || get_list_item(line).filter(|(m, _)| m == current).is_some()
+ || line == "+"
+ })?;
+ }
+ } else {
+ break;
+ }
+ self.skip_blank_lines()?;
+ }
+
+ Ok(())
+ }
+
+ fn process_source_code_block(&mut self, level: usize) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ if let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) {
+ let mut styles = styles.split(',');
+ if !styles.next().unwrap().is_empty() {
+ bail!("not a source code block");
+ }
+ let language = styles.next();
+ return self.process_listing_block(language, level);
+ }
+ }
+ bail!("not a source code block")
+ }
+
+ fn process_listing_block(&mut self, style: Option<&str>, level: usize) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ if line == LISTING_DELIMITER {
+ self.write_indent(level);
+ self.output.push_str("```");
+ if let Some(style) = style {
+ self.output.push_str(style);
+ }
+ self.output.push('\n');
+ while let Some(line) = self.iter.next() {
+ let line = line?;
+ if line == LISTING_DELIMITER {
+ self.write_line("```", level);
+ return Ok(());
+ } else {
+ self.write_line(&line, level);
+ }
+ }
+ bail!("listing block is not terminated")
+ }
+ }
+ bail!("not a listing block")
+ }
+
+ fn process_block_with_title(&mut self, level: usize) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ let title =
+ line.strip_prefix('.').ok_or_else(|| anyhow!("extraction of the title failed"))?;
+
+ let line = self
+ .iter
+ .peek()
+ .ok_or_else(|| anyhow!("target block for the title is not found"))?;
+ let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
+ if line.starts_with(IMAGE_BLOCK_PREFIX) {
+ return self.process_image_block(Some(title), level);
+ } else if line.starts_with(VIDEO_BLOCK_PREFIX) {
+ return self.process_video_block(Some(title), level);
+ } else {
+ bail!("title for that block type is not supported");
+ }
+ }
+ bail!("not a title")
+ }
+
+ fn process_image_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ if let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) {
+ let alt = if let Some(stripped) =
+ attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"'))
+ {
+ stripped
+ } else {
+ attrs
+ };
+ if let Some(caption) = caption {
+ self.write_caption_line(caption, level);
+ }
+ self.write_indent(level);
+ self.output.push_str("![");
+ self.output.push_str(alt);
+ self.output.push_str("](");
+ self.output.push_str(url);
+ self.output.push_str(")\n");
+ return Ok(());
+ }
+ }
+ bail!("not a image block")
+ }
+
+ fn process_video_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> {
+ if let Some(Ok(line)) = self.iter.next() {
+ if let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) {
+ let html_attrs = match attrs {
+ "options=loop" => "controls loop",
+ r#"options="autoplay,loop""# => "autoplay controls loop",
+ _ => bail!("unsupported video syntax"),
+ };
+ if let Some(caption) = caption {
+ self.write_caption_line(caption, level);
+ }
+ self.write_indent(level);
+ self.output.push_str(r#"<video src=""#);
+ self.output.push_str(url);
+ self.output.push_str(r#"" "#);
+ self.output.push_str(html_attrs);
+ self.output.push_str(">Your browser does not support the video tag.</video>\n");
+ return Ok(());
+ }
+ }
+ bail!("not a video block")
+ }
+
+ fn process_paragraph<P>(&mut self, level: usize, predicate: P) -> anyhow::Result<()>
+ where
+ P: Fn(&str) -> bool,
+ {
+ while let Some(line) = self.iter.peek() {
+ let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
+ if predicate(line) {
+ break;
+ }
+
+ self.write_indent(level);
+ let line = self.iter.next().unwrap()?;
+ let line = line.trim_start();
+ let line = process_inline_macros(line)?;
+ if let Some(stripped) = line.strip_suffix('+') {
+ self.output.push_str(stripped);
+ self.output.push('\\');
+ } else {
+ self.output.push_str(&line);
+ }
+ self.output.push('\n');
+ }
+
+ Ok(())
+ }
+
+ fn skip_blank_lines(&mut self) -> anyhow::Result<()> {
+ while let Some(line) = self.iter.peek() {
+ if !line.as_deref().unwrap().is_empty() {
+ break;
+ }
+ self.iter.next().unwrap()?;
+ }
+ Ok(())
+ }
+
+ fn write_title(&mut self, indent: usize, title: &str) {
+ for _ in 0..indent {
+ self.output.push('#');
+ }
+ self.output.push(' ');
+ self.output.push_str(title);
+ self.output.push('\n');
+ }
+
+ fn write_list_item(&mut self, item: &str, nesting: &ListNesting) {
+ let (marker, indent) = nesting.marker();
+ self.write_indent(indent);
+ self.output.push_str(marker);
+ self.output.push_str(item);
+ self.output.push('\n');
+ }
+
+ fn write_caption_line(&mut self, caption: &str, indent: usize) {
+ self.write_indent(indent);
+ self.output.push('_');
+ self.output.push_str(caption);
+ self.output.push_str("_\\\n");
+ }
+
+ fn write_indent(&mut self, indent: usize) {
+ for _ in 0..indent {
+ self.output.push(' ');
+ }
+ }
+
+ fn write_line(&mut self, line: &str, indent: usize) {
+ self.write_indent(indent);
+ self.output.push_str(line);
+ self.output.push('\n');
+ }
+}
+
+pub(crate) fn convert_asciidoc_to_markdown<R>(input: R) -> anyhow::Result<String>
+where
+ R: BufRead,
+{
+ let mut output = String::new();
+ let mut iter = input.lines().peekable();
+
+ let mut converter = Converter::new(&mut iter, &mut output);
+ converter.process()?;
+
+ Ok(output)
+}
+
+fn get_title(line: &str) -> Option<(usize, &str)> {
+ strip_prefix_symbol(line, '=')
+}
+
+fn get_list_item(line: &str) -> Option<(ListMarker, &str)> {
+ const HYPHEN_MARKER: &str = "- ";
+ if let Some(text) = line.strip_prefix(HYPHEN_MARKER) {
+ Some((ListMarker::Hyphen, text))
+ } else if let Some((count, text)) = strip_prefix_symbol(line, '*') {
+ Some((ListMarker::Asterisk(count), text))
+ } else if let Some((count, text)) = strip_prefix_symbol(line, '.') {
+ Some((ListMarker::Dot(count), text))
+ } else {
+ None
+ }
+}
+
+fn strip_prefix_symbol(line: &str, symbol: char) -> Option<(usize, &str)> {
+ let mut iter = line.chars();
+ if iter.next()? != symbol {
+ return None;
+ }
+ let mut count = 1;
+ loop {
+ match iter.next() {
+ Some(ch) if ch == symbol => {
+ count += 1;
+ }
+ Some(' ') => {
+ break;
+ }
+ _ => return None,
+ }
+ }
+ Some((count, iter.as_str()))
+}
+
+fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a str)> {
+ if let Some(line) = line.strip_prefix(prefix) {
+ if let Some((url, rest)) = line.split_once('[') {
+ if let Some(attrs) = rest.strip_suffix(']') {
+ return Some((url, attrs));
+ }
+ }
+ }
+ None
+}
+
+#[derive(Debug)]
+struct ListNesting(Vec<ListMarker>);
+
+impl ListNesting {
+ fn new() -> Self {
+ Self(Vec::<ListMarker>::with_capacity(6))
+ }
+
+ fn current(&mut self) -> Option<&ListMarker> {
+ self.0.last()
+ }
+
+ fn set_current(&mut self, marker: ListMarker) {
+ let Self(markers) = self;
+ if let Some(index) = markers.iter().position(|m| *m == marker) {
+ markers.truncate(index + 1);
+ } else {
+ markers.push(marker);
+ }
+ }
+
+ fn indent(&self) -> usize {
+ self.0.iter().map(|m| m.in_markdown().len()).sum()
+ }
+
+ fn marker(&self) -> (&str, usize) {
+ let Self(markers) = self;
+ let indent = markers.iter().take(markers.len() - 1).map(|m| m.in_markdown().len()).sum();
+ let marker = match markers.last() {
+ None => "",
+ Some(marker) => marker.in_markdown(),
+ };
+ (marker, indent)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+enum ListMarker {
+ Asterisk(usize),
+ Hyphen,
+ Dot(usize),
+}
+
+impl ListMarker {
+ fn in_markdown(&self) -> &str {
+ match self {
+ ListMarker::Asterisk(_) => "- ",
+ ListMarker::Hyphen => "- ",
+ ListMarker::Dot(_) => "1. ",
+ }
+ }
+}
+
+fn process_inline_macros(line: &str) -> anyhow::Result<Cow<'_, str>> {
+ let mut chars = line.char_indices();
+ loop {
+ let (start, end, a_macro) = match get_next_line_component(&mut chars) {
+ Component::None => break,
+ Component::Text => continue,
+ Component::Macro(s, e, m) => (s, e, m),
+ };
+ let mut src = line.chars();
+ let mut processed = String::new();
+ for _ in 0..start {
+ processed.push(src.next().unwrap());
+ }
+ processed.push_str(a_macro.process()?.as_str());
+ for _ in start..end {
+ let _ = src.next().unwrap();
+ }
+ let mut pos = end;
+
+ loop {
+ let (start, end, a_macro) = match get_next_line_component(&mut chars) {
+ Component::None => break,
+ Component::Text => continue,
+ Component::Macro(s, e, m) => (s, e, m),
+ };
+ for _ in pos..start {
+ processed.push(src.next().unwrap());
+ }
+ processed.push_str(a_macro.process()?.as_str());
+ for _ in start..end {
+ let _ = src.next().unwrap();
+ }
+ pos = end;
+ }
+ for ch in src {
+ processed.push(ch);
+ }
+ return Ok(Cow::Owned(processed));
+ }
+ Ok(Cow::Borrowed(line))
+}
+
+fn get_next_line_component(chars: &mut std::str::CharIndices<'_>) -> Component {
+ let (start, mut macro_name) = match chars.next() {
+ None => return Component::None,
+ Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text,
+ Some((pos, ch)) => (pos, String::from(ch)),
+ };
+ loop {
+ match chars.next() {
+ None => return Component::None,
+ Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text,
+ Some((_, ':')) => break,
+ Some((_, ch)) => macro_name.push(ch),
+ }
+ }
+
+ let mut macro_target = String::new();
+ loop {
+ match chars.next() {
+ None => return Component::None,
+ Some((_, ' ')) => return Component::Text,
+ Some((_, '[')) => break,
+ Some((_, ch)) => macro_target.push(ch),
+ }
+ }
+
+ let mut attr_value = String::new();
+ let end = loop {
+ match chars.next() {
+ None => return Component::None,
+ Some((pos, ']')) => break pos + 1,
+ Some((_, ch)) => attr_value.push(ch),
+ }
+ };
+
+ Component::Macro(start, end, Macro::new(macro_name, macro_target, attr_value))
+}
+
+enum Component {
+ None,
+ Text,
+ Macro(usize, usize, Macro),
+}
+
+struct Macro {
+ name: String,
+ target: String,
+ attrs: String,
+}
+
+impl Macro {
+ fn new(name: String, target: String, attrs: String) -> Self {
+ Self { name, target, attrs }
+ }
+
+ fn process(&self) -> anyhow::Result<String> {
+ let name = &self.name;
+ let text = match name.as_str() {
+ "https" => {
+ let url = &self.target;
+ let anchor_text = &self.attrs;
+ format!("[{anchor_text}](https:{url})")
+ }
+ "image" => {
+ let url = &self.target;
+ let alt = &self.attrs;
+ format!("![{alt}]({url})")
+ }
+ "kbd" => {
+ let keys = self.attrs.split('+').map(|k| Cow::Owned(format!("<kbd>{k}</kbd>")));
+ keys.collect::<Vec<_>>().join("+")
+ }
+ "pr" => {
+ let pr = &self.target;
+ let url = format!("https://github.com/rust-analyzer/rust-analyzer/pull/{pr}");
+ format!("[`#{pr}`]({url})")
+ }
+ "commit" => {
+ let hash = &self.target;
+ let short = &hash[0..7];
+ let url = format!("https://github.com/rust-analyzer/rust-analyzer/commit/{hash}");
+ format!("[`{short}`]({url})")
+ }
+ "release" => {
+ let date = &self.target;
+ let url = format!("https://github.com/rust-analyzer/rust-analyzer/releases/{date}");
+ format!("[`{date}`]({url})")
+ }
+ _ => bail!("macro not supported: {name}"),
+ };
+ Ok(text)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::fs::read_to_string;
+
+ #[test]
+ fn test_asciidoc_to_markdown_conversion() {
+ let input = read_to_string("test_data/input.adoc").unwrap();
+ let expected = read_to_string("test_data/expected.md").unwrap();
+ let actual = convert_asciidoc_to_markdown(std::io::Cursor::new(&input)).unwrap();
+
+ assert_eq!(actual, expected);
+ }
+
+ macro_rules! test_inline_macro_processing {
+ ($((
+ $name:ident,
+ $input:expr,
+ $expected:expr
+ ),)*) => ($(
+ #[test]
+ fn $name() {
+ let input = $input;
+ let actual = process_inline_macros(&input).unwrap();
+ let expected = $expected;
+ assert_eq!(actual, expected)
+ }
+ )*);
+ }
+
+ test_inline_macro_processing! {
+ (inline_macro_processing_for_empty_line, "", ""),
+ (inline_macro_processing_for_line_with_no_macro, "foo bar", "foo bar"),
+ (
+ inline_macro_processing_for_macro_in_line_start,
+ "kbd::[Ctrl+T] foo",
+ "<kbd>Ctrl</kbd>+<kbd>T</kbd> foo"
+ ),
+ (
+ inline_macro_processing_for_macro_in_line_end,
+ "foo kbd::[Ctrl+T]",
+ "foo <kbd>Ctrl</kbd>+<kbd>T</kbd>"
+ ),
+ (
+ inline_macro_processing_for_macro_in_the_middle_of_line,
+ "foo kbd::[Ctrl+T] foo",
+ "foo <kbd>Ctrl</kbd>+<kbd>T</kbd> foo"
+ ),
+ (
+ inline_macro_processing_for_several_macros,
+ "foo kbd::[Ctrl+T] foo kbd::[Enter] foo",
+ "foo <kbd>Ctrl</kbd>+<kbd>T</kbd> foo <kbd>Enter</kbd> foo"
+ ),
+ (
+ inline_macro_processing_for_several_macros_without_text_in_between,
+ "foo kbd::[Ctrl+T]kbd::[Enter] foo",
+ "foo <kbd>Ctrl</kbd>+<kbd>T</kbd><kbd>Enter</kbd> foo"
+ ),
+ }
+}
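
Note (not part of the diff): a minimal sketch of driving the converter from an in-memory string rather than the test files; the kind of output to expect is what test_data/expected.md below shows in full:

    // Sketch only; convert_asciidoc_to_markdown is pub(crate), so this would
    // run from inside the xtask crate (for example next to the existing tests).
    let adoc = "= Changelog #1\n\nHello!\n\n== New Features\n\n* pr:1111[] foo\n";
    let md = convert_asciidoc_to_markdown(std::io::Cursor::new(adoc)).unwrap();
    // `md` starts with "# Changelog #1", and the pr:[] macro is expanded into a
    // Markdown link to https://github.com/rust-analyzer/rust-analyzer/pull/1111.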
diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs
index eda8fceef..4a3069147 100644
--- a/src/tools/rust-analyzer/xtask/src/release.rs
+++ b/src/tools/rust-analyzer/xtask/src/release.rs
@@ -64,8 +64,8 @@ impl flags::Release {
let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?;
- let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
- sh.write_file(&path, &contents)?;
+ let path = changelog_dir.join(format!("{today}-changelog-{changelog_n}.adoc"));
+ sh.write_file(path, contents)?;
Ok(())
}
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
index 2647f7794..d2a1483e3 100644
--- a/src/tools/rust-analyzer/xtask/src/release/changelog.rs
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -23,9 +23,9 @@ pub(crate) fn get_changelog(
let mut others = String::new();
for line in git_log.lines() {
let line = line.trim_start();
- if let Some(pr_num) = parse_pr_number(&line) {
+ if let Some(pr_num) = parse_pr_number(line) {
let accept = "Accept: application/vnd.github.v3+json";
- let authorization = format!("Authorization: token {}", token);
+ let authorization = format!("Authorization: token {token}");
let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues";
// we don't use an HTTPS client or JSON parser to keep the build times low
@@ -57,36 +57,36 @@ pub(crate) fn get_changelog(
PrKind::Other => &mut others,
PrKind::Skip => continue,
};
- writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap();
+ writeln!(s, "* pr:{pr_num}[] {}", l.message.as_deref().unwrap_or(&pr_title)).unwrap();
}
}
let contents = format!(
"\
-= Changelog #{}
+= Changelog #{changelog_n}
:sectanchors:
+:experimental:
:page-layout: post
-Commit: commit:{}[] +
-Release: release:{}[]
+Commit: commit:{commit}[] +
+Release: release:{today}[]
== New Features
-{}
+{features}
== Fixes
-{}
+{fixes}
== Internal Improvements
-{}
+{internal}
== Others
-{}
-",
- changelog_n, commit, today, features, fixes, internal, others
+{others}
+"
);
Ok(contents)
}
@@ -112,11 +112,9 @@ fn unescape(s: &str) -> String {
fn parse_pr_number(s: &str) -> Option<u32> {
const BORS_PREFIX: &str = "Merge #";
const HOMU_PREFIX: &str = "Auto merge of #";
- if s.starts_with(BORS_PREFIX) {
- let s = &s[BORS_PREFIX.len()..];
+ if let Some(s) = s.strip_prefix(BORS_PREFIX) {
s.parse().ok()
- } else if s.starts_with(HOMU_PREFIX) {
- let s = &s[HOMU_PREFIX.len()..];
+ } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) {
if let Some(space) = s.find(' ') {
s[..space].parse().ok()
} else {
diff --git a/src/tools/rust-analyzer/xtask/test_data/expected.md b/src/tools/rust-analyzer/xtask/test_data/expected.md
new file mode 100644
index 000000000..19c940c67
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/test_data/expected.md
@@ -0,0 +1,81 @@
+# Changelog #256
+
+Hello!
+
+Commit: [`0123456`](https://github.com/rust-analyzer/rust-analyzer/commit/0123456789abcdef0123456789abcdef01234567) \
+Release: [`2022-01-01`](https://github.com/rust-analyzer/rust-analyzer/releases/2022-01-01)
+
+## New Features
+
+- **BREAKING** [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111) shortcut <kbd>ctrl</kbd>+<kbd>r</kbd>
+ - hyphen-prefixed list item
+- nested list item
+ - `foo` -> `foofoo`
+ - `bar` -> `barbar`
+- listing in the secondary level
+ 1. install
+ 1. add to config
+
+ ```json
+ {"foo":"bar"}
+ ```
+- list item with continuation
+
+ ![](https://example.com/animation.gif)
+
+ ![alt text](https://example.com/animation.gif)
+
+ <video src="https://example.com/movie.mp4" controls loop>Your browser does not support the video tag.</video>
+
+ <video src="https://example.com/movie.mp4" autoplay controls loop>Your browser does not support the video tag.</video>
+
+ _Image_\
+ ![](https://example.com/animation.gif)
+
+ _Video_\
+ <video src="https://example.com/movie.mp4" controls loop>Your browser does not support the video tag.</video>
+
+ ```bash
+ rustup update nightly
+ ```
+
+ ```
+ This is a plain listing.
+ ```
+- single line item followed by empty lines
+- multiline list
+ item followed by empty lines
+- multiline list
+ item with indent
+- multiline list
+ item not followed by empty lines
+- multiline list
+ item followed by different marker
+ - foo
+ - bar
+- multiline list
+ item followed by list continuation
+
+ paragraph
+ paragraph
+
+## Another Section
+
+- foo bar baz
+- list item with an inline image
+ ![](https://example.com/animation.gif)
+
+The highlight of the month is probably [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111).
+See [online manual](https://example.com/manual) for more information.
+
+```bash
+rustup update nightly
+```
+
+```
+rustup update nightly
+```
+
+```
+This is a plain listing.
+```
diff --git a/src/tools/rust-analyzer/xtask/test_data/input.adoc b/src/tools/rust-analyzer/xtask/test_data/input.adoc
new file mode 100644
index 000000000..105bd8df0
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/test_data/input.adoc
@@ -0,0 +1,90 @@
+= Changelog #256
+:sectanchors:
+:page-layout: post
+
+Hello!
+
+Commit: commit:0123456789abcdef0123456789abcdef01234567[] +
+Release: release:2022-01-01[]
+
+== New Features
+
+* **BREAKING** pr:1111[] shortcut kbd:[ctrl+r]
+- hyphen-prefixed list item
+* nested list item
+** `foo` -> `foofoo`
+** `bar` -> `barbar`
+* listing in the secondary level
+. install
+. add to config
++
+[source,json]
+----
+{"foo":"bar"}
+----
+* list item with continuation
++
+image::https://example.com/animation.gif[]
++
+image::https://example.com/animation.gif["alt text"]
++
+video::https://example.com/movie.mp4[options=loop]
++
+video::https://example.com/movie.mp4[options="autoplay,loop"]
++
+.Image
+image::https://example.com/animation.gif[]
++
+.Video
+video::https://example.com/movie.mp4[options=loop]
++
+[source,bash]
+----
+rustup update nightly
+----
++
+----
+This is a plain listing.
+----
+* single line item followed by empty lines
+
+* multiline list
+item followed by empty lines
+
+* multiline list
+ item with indent
+
+* multiline list
+item not followed by empty lines
+* multiline list
+item followed by different marker
+** foo
+** bar
+* multiline list
+item followed by list continuation
++
+paragraph
+paragraph
+
+== Another Section
+
+* foo bar baz
+* list item with an inline image
+ image:https://example.com/animation.gif[]
+
+The highlight of the month is probably pr:1111[].
+See https://example.com/manual[online manual] for more information.
+
+[source,bash]
+----
+rustup update nightly
+----
+
+[source]
+----
+rustup update nightly
+----
+
+----
+This is a plain listing.
+----