Skip to content

Commit

Permalink
Merge branch 'main' into 162
Browse files Browse the repository at this point in the history
  • Loading branch information
crowlKats committed Nov 29, 2023
2 parents 1e13aaf + e65cfc9 commit 85ef22d
Show file tree
Hide file tree
Showing 7 changed files with 14 additions and 15 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ fn main() {
We appreciate your help!

The code of conduct from the Deno repository applies here too:
https://github.com/denoland/deno/blob/main/CODE_OF_CONDUCT.md.
https://github.com/denoland/deno/blob/main/.github/CODE_OF_CONDUCT.md.
4 changes: 2 additions & 2 deletions src/canonicalize_and_process.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ pub fn canonicalize_protocol(value: &str) -> Result<String, Error> {
if value.is_empty() {
return Ok(String::new());
}
url::Url::parse(&format!("{}://dummy.test", value))
url::Url::parse(&format!("{value}://dummy.test"))
.map(|url| url.scheme().to_owned())
.map_err(Error::Url)
}
Expand Down Expand Up @@ -92,7 +92,7 @@ pub fn canonicalize_pathname(value: &str) -> Result<String, Error> {
}
let leading_slash = value.starts_with('/');
let modified_value = if !leading_slash {
format!("/-{}", value)
format!("/-{value}")
} else {
value.to_string()
};
Expand Down
2 changes: 1 addition & 1 deletion src/component.rs
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ fn generate_pattern_string(part_list: &[&Part], options: &Options) -> String {
{
result.push('*');
} else {
result.push_str(&format!("({})", FULL_WILDCARD_REGEXP_VALUE));
result.push_str(&format!("({FULL_WILDCARD_REGEXP_VALUE})"));
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/constructor_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ impl<'a> ConstructorStringParser<'a> {
if index < self.token_list.len() {
&self.token_list[index]
} else {
assert!(self.token_list.len() <= 1);
assert!(!self.token_list.is_empty());
let token = self.token_list.last().unwrap();
assert!(token.kind == TokenType::End);
token
Expand Down
14 changes: 7 additions & 7 deletions src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,21 @@ use derive_more::Display;

use crate::tokenizer::TokenType;

/// A error occuring during URL pattern construction, or matching.
/// An error occurring during URL pattern construction, or matching.
#[derive(Display)]
pub enum Error {
#[display(fmt = "a relative input without a base URL is not valid")]
BaseUrlRequired,

#[display(
fmt = "specifying both an init object, and a seperate base URL is not valid"
fmt = "specifying both an init object, and a separate base URL is not valid"
)]
BaseUrlWithInit,

#[display(fmt = "tokenizer error: {} (at char {})", _0, _1)]
#[display(fmt = "tokenizer error: {_0} (at char {_1})")]
Tokenizer(TokenizerError, usize),

#[display(fmt = "parser error: {}", _0)]
#[display(fmt = "parser error: {_0}")]
Parser(ParserError),

Url(url::ParseError),
Expand All @@ -39,15 +39,15 @@ pub enum TokenizerError {
IncompleteEscapeCode,
#[display(fmt = "invalid name; must be at least length 1")]
InvalidName,
#[display(fmt = "invalid regex: {}", _0)]
#[display(fmt = "invalid regex: {_0}")]
InvalidRegex(&'static str),
}

#[derive(Debug, Display)]
pub enum ParserError {
#[display(fmt = "expected token {}, found '{}' of type {}", _0, _2, _1)]
#[display(fmt = "expected token {_0}, found '{_2}' of type {_1}")]
ExpectedToken(TokenType, TokenType, String),

#[display(fmt = "pattern contains duplicate name {}", _0)]
#[display(fmt = "pattern contains duplicate name {_0}")]
DuplicateName(String),
}
2 changes: 1 addition & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -592,7 +592,7 @@ mod tests {
);

if let Some(reason) = case.skip {
println!("🟠 Skipping: {}", reason);
println!("🟠 Skipping: {reason}");
return;
}

Expand Down
3 changes: 1 addition & 2 deletions src/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,8 +134,7 @@ pub fn tokenize(
};

while tokenizer.index < tokenizer.input.len() {
tokenizer.next_index = tokenizer.index;
tokenizer.get_next_codepoint();
tokenizer.seek_and_get_next_codepoint(tokenizer.index);

if tokenizer.code_point == Some('*') {
tokenizer.add_token_with_default_pos_and_len(TokenType::Asterisk);
Expand Down

0 comments on commit 85ef22d

Please sign in to comment.