Skip to content

Commit 17bcaa8

Browse files
committed
fix CI
1 parent c52a259 commit 17bcaa8

File tree

5 files changed

+17
-24
lines changed

5 files changed

+17
-24
lines changed

.github/workflows/ci.yml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -45,13 +45,6 @@ jobs:
4545
- name: Run cargo test
4646
run: cargo test --workspace
4747

48-
- name: Install nightly toolchain
49-
uses: dtolnay/rust-toolchain@nightly
50-
51-
- name: Run cargo miri test
52-
run: cargo miri test --workspace
53-
54-
5548
miri:
5649
name: Miri
5750
runs-on: ubuntu-latest

src/intern.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ use hashbrown::raw::RawTable;
1212
/// of using the token data directly.
1313
/// This allows for much better performance by amortizing the cost of hashing/equality.
1414
///
15-
/// While you can intern tokens yourself it is strongly recommended to use [`InternedInput`](crate::intern::InternedInput) module.
15+
/// While you can intern tokens yourself, it is strongly recommended to use the [`InternedInput`] module.
1616
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
1717
#[repr(transparent)]
1818
pub struct Token(pub u32);
@@ -44,7 +44,7 @@ pub trait TokenSource {
4444
/// of using the token data directly.
4545
/// This allows for much better performance by amortizing the cost of hashing/equality.
4646
///
47-
/// While you can intern tokens yourself it is strongly recommended to use [`InternedInput`](crate::intern::InternedInput) module.
47+
/// While you can intern tokens yourself, it is strongly recommended to use the [`InternedInput`] module.
4848
#[derive(Default)]
4949
pub struct InternedInput<T: Eq + Hash> {
5050
pub before: Vec<Token>,

src/sink.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ pub trait Sink: Sized {
3535
/// to obtain the final diff result
3636
fn finish(self) -> Self::Out;
3737

38-
/// Utility method that constructs a [`Counter`](crate::sink::Counter) that tracks the total number
38+
/// Utility method that constructs a [`Counter`] that tracks the total number
3939
/// of inserted and removed tokens in the changes passed to [`process_change`](crate::Sink::process_change).
4040
fn with_counter(self) -> Counter<Self> {
4141
Counter::new(self)
@@ -58,7 +58,7 @@ impl Sink for () {
5858
fn finish(self) -> Self::Out {}
5959
}
6060

61-
/// A [`Sink`](crate::Sink) which wraps a different sink
61+
/// A [`Sink`] which wraps a different sink
6262
/// and counts the number of `removed` and `inserted` [tokens](crate::intern::Token).
6363
pub struct Counter<T> {
6464
/// Total number of recorded removed [`tokens`](crate::intern::Token).
@@ -67,10 +67,10 @@ pub struct Counter<T> {
6767
/// Total number of recorded inserted [`tokens`](crate::intern::Token).
6868
/// Computed by summing the lengths of the `after` subsequences passed to [`process_change`](crate::Sink::process_change).
6969
pub insertions: u32,
70-
/// The [`Sink`](crate::Sink) for which the counter records [`tokens`](crate::intern::Token).
70+
/// The [`Sink`] for which the counter records [`tokens`](crate::intern::Token).
7171
/// All calls to [`process_change`](crate::Sink::process_change) are forwarded to the `sink` by the counter.
7272
/// After [`finish`](crate::Sink::finish) is called, this field contains the output returned by the [`finish`](crate::Sink::finish)
73-
/// method of the wrapped [`Sink`](crate::Sink)
73+
/// method of the wrapped [`Sink`].
7474
pub wrapped: T,
7575
}
7676

src/sources.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use std::str::from_utf8_unchecked;
33

44
use crate::TokenSource;
55

6-
/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
6+
/// Returns a [`TokenSource`] that uses
77
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
88
/// not included in the emitted tokens.
99
/// This means that changing the newline separator from `\r\n` to `\n`
@@ -12,7 +12,7 @@ pub fn lines(data: &str) -> Lines<'_, false> {
1212
Lines(ByteLines(data.as_bytes()))
1313
}
1414

15-
/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
15+
/// Returns a [`TokenSource`] that uses
1616
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
1717
/// included in the emitted tokens.
1818
/// This means that changing the newline separator from `\r\n` to `\n`
@@ -21,7 +21,7 @@ pub fn lines_with_terminator(data: &str) -> Lines<'_, true> {
2121
Lines(ByteLines(data.as_bytes()))
2222
}
2323

24-
/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
24+
/// Returns a [`TokenSource`] that uses
2525
/// the lines in `data` as Tokens. A line is a continuous subslice of
2626
/// `data` which does not contain `\n` (or `\r\n`).
2727
/// The newline separator (`\r\n` or `\n`) is not included in the emitted tokens.
@@ -31,7 +31,7 @@ pub fn byte_lines_with_terminator(data: &[u8]) -> ByteLines<'_, true> {
3131
ByteLines(data)
3232
}
3333

34-
/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
34+
/// Returns a [`TokenSource`] that uses
3535
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
3636
/// included in the emitted tokens.
3737
/// This means that changing the newline separator from `\r\n` to `\n`
@@ -69,8 +69,8 @@ impl<'a> TokenSource for &'a [u8] {
6969
}
7070
}
7171

72-
/// A [`TokenSource`](crate::intern::TokenSource) that returns the lines of a `str` as tokens.
73-
/// See [`lines`](crate::sources::lines) and [`lines_with_terminator`](crate::sources::lines_with_terminator) for details
72+
/// A [`TokenSource`] that returns the lines of a `str` as tokens.
73+
/// See [`lines`] and [`lines_with_terminator`] for details
7474
#[derive(Clone, Copy, PartialEq, Eq)]
7575
pub struct Lines<'a, const INCLUDE_LINE_TERMINATOR: bool>(ByteLines<'a, INCLUDE_LINE_TERMINATOR>);
7676

@@ -99,8 +99,8 @@ impl<'a, const INCLUDE_LINE_TERMINATOR: bool> TokenSource for Lines<'a, INCLUDE_
9999
}
100100
}
101101

102-
/// A [`TokenSource`](crate::intern::TokenSource) that returns the lines of a byte slice as tokens.
103-
/// See [`byte_lines`](crate::sources::lines) and [`byte_lines_with_terminator`](crate::sources::byte_lines_with_terminator) for details
102+
/// A [`TokenSource`] that returns the lines of a byte slice as tokens.
103+
/// See [`byte_lines`] and [`byte_lines_with_terminator`] for details
104104
#[derive(Clone, Copy, PartialEq, Eq)]
105105
pub struct ByteLines<'a, const INCLUDE_LINE_TERMINATOR: bool>(&'a [u8]);
106106

src/unified_diff.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use std::ops::Range;
55
use crate::intern::{InternedInput, Interner, Token};
66
use crate::Sink;
77

8-
/// A [`Sink`](crate::sink::Sink) that creates a textual diff
8+
/// A [`Sink`] that creates a textual diff
99
/// in the format typically output by git or gnu-diff if the `-u` option is used
1010
pub struct UnifiedDiffBuilder<'a, W, T>
1111
where
@@ -31,7 +31,7 @@ where
3131
T: Hash + Eq + Display,
3232
{
3333
/// Create a new `UnifiedDiffBuilder` for the given `input`,
34-
/// that will return a [`String`](std::string::String).
34+
/// that will return a [`String`].
3535
pub fn new(input: &'a InternedInput<T>) -> Self {
3636
Self {
3737
before_hunk_start: 0,
@@ -54,7 +54,7 @@ where
5454
T: Hash + Eq + Display,
5555
{
5656
/// Create a new `UnifiedDiffBuilder` for the given `input`,
57-
/// that will writes it output to the provided implementation of [`Write`](std::fmt::Write).
57+
/// that will write its output to the provided implementation of [`Write`].
5858
pub fn with_writer(input: &'a InternedInput<T>, writer: W) -> Self {
5959
Self {
6060
before_hunk_start: 0,

0 commit comments

Comments
 (0)