Commit

a few more spelling and lints

nyurik committed Dec 15, 2023
1 parent 5687a2e commit aaf8332
Showing 7 changed files with 14 additions and 16 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/publish-crates-io.yaml
@@ -36,7 +36,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
# This pulls from the "Get Changelog Entry" step above, referencing it's ID to get its outputs object.
# This pulls from the "Get Changelog Entry" step above, referencing its ID to get its outputs object.
# See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
tag_name: "v${{ steps.changelog_reader.outputs.version }}"
name: "serde_with v${{ steps.changelog_reader.outputs.version }}"
4 changes: 2 additions & 2 deletions serde_with/src/chrono_0_4.rs
@@ -191,7 +191,7 @@ impl<'de> DeserializeAs<'de, NaiveDateTime> for DateTime<Utc> {
}
}

/// Convert a [`chrono_0_4::Duration`] into a [`DurationSigned`]
/// Convert a [`Duration`] into a [`DurationSigned`]
fn duration_into_duration_signed(dur: &Duration) -> DurationSigned {
match dur.to_std() {
Ok(dur) => DurationSigned::with_duration(Sign::Positive, dur),
@@ -205,7 +205,7 @@ fn duration_into_duration_signed(dur: &Duration) -> DurationSigned {
}
}

/// Convert a [`DurationSigned`] into a [`chrono_0_4::Duration`]
/// Convert a [`DurationSigned`] into a [`Duration`]
fn duration_from_duration_signed<'de, D>(dur: DurationSigned) -> Result<Duration, D::Error>
where
D: Deserializer<'de>,
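The `Sign::Positive` branch visible in `duration_into_duration_signed` above exists because chrono's `Duration::to_std()` only succeeds for non-negative durations. A minimal standalone sketch of that behaviour, assuming the `chrono` 0.4 crate (this is an illustration, not code from this commit):

```rust
use chrono::Duration;

fn main() {
    // `to_std()` converts to `std::time::Duration`, which cannot be negative,
    // so the conversion above has to record the sign separately.
    assert!(Duration::seconds(5).to_std().is_ok());
    assert!(Duration::seconds(-5).to_std().is_err());
}
```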
4 changes: 2 additions & 2 deletions serde_with/src/enum_map.rs
@@ -737,7 +737,7 @@ where
self.deserialize_seq(visitor)
}

serde::forward_to_deserialize_any! {
forward_to_deserialize_any! {
bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
bytes byte_buf option unit unit_struct newtype_struct tuple
tuple_struct map struct enum identifier ignored_any
@@ -819,7 +819,7 @@ where
visitor.visit_enum(self)
}

serde::forward_to_deserialize_any! {
forward_to_deserialize_any! {
bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
bytes byte_buf option unit unit_struct newtype_struct seq tuple
tuple_struct map struct identifier ignored_any
2 changes: 1 addition & 1 deletion serde_with/src/guide.md
@@ -11,7 +11,7 @@ The crate offers four types of functionality.
## 1. A more flexible and composable replacement for the with annotation, called `serde_as`

This is an alternative to [serde's with-annotation][with-annotation], which adds flexibility and composability to the scheme.
The main downside is that it work with fewer types than [with-annotations][with-annotation].
The main downside is that it works with fewer types than [with-annotations][with-annotation].
However, all types from the Rust Standard Library should be supported in all combinations and any missing entry is a bug.

You mirror the type structure of the field you want to de/serialize.
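As a rough illustration of the "mirror the type structure" idea from the guide excerpt above, a minimal sketch of `serde_as` usage (the struct and field names here are made up; `DisplayFromStr` is one of the converters that also appears in the test imports later in this commit):

```rust
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};

#[serde_as]
#[derive(Serialize, Deserialize)]
struct Data {
    // The annotation mirrors the field's type: `DisplayFromStr` for a single value...
    #[serde_as(as = "DisplayFromStr")]
    id: u64,
    // ...and `Vec<DisplayFromStr>` for a `Vec` of such values.
    #[serde_as(as = "Vec<DisplayFromStr>")]
    scores: Vec<u32>,
}
```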
4 changes: 2 additions & 2 deletions serde_with/tests/serde_as/lib.rs
@@ -36,8 +36,8 @@ use expect_test::expect;
use serde::{Deserialize, Serialize};
use serde_with::{
formats::{CommaSeparator, Flexible, Strict},
serde_as, BoolFromInt, BytesOrString, DisplayFromStr, Map, NoneAsEmptyString, OneOrMany, Same,
Seq, StringWithSeparator,
serde_as, BoolFromInt, BytesOrString, DisplayFromStr, NoneAsEmptyString, OneOrMany, Same,
StringWithSeparator,
};
use std::{
collections::HashMap,
12 changes: 5 additions & 7 deletions serde_with_macros/src/apply.rs
@@ -77,14 +77,12 @@ pub fn apply(args: TokenStream, input: TokenStream) -> TokenStream {
.alt_crate_path
.unwrap_or_else(|| syn::parse_quote!(::serde_with));

let res = match super::apply_function_to_struct_and_enum_fields_darling(
let res = super::apply_function_to_struct_and_enum_fields_darling(
input,
&serde_with_crate_path,
&prepare_apply_attribute_to_field(args),
) {
Ok(res) => res,
Err(err) => err.write_errors(),
};
)
.unwrap_or_else(|err| err.write_errors());
TokenStream::from(res)
}

@@ -160,15 +158,15 @@ fn ty_pattern_matches_ty(ty_pattern: &Type, ty: &Type) -> bool {
///
/// Two paths match if they are equal except for the path arguments.
/// Path arguments are generics on types or functions.
/// If the pattern has no argument, it can match with everthing.
/// If the pattern has no argument, it can match with everything.
/// If the pattern does have an argument, the other side must be equal.
fn path_pattern_matches_path(path_pattern: &Path, path: &Path) -> bool {
if path_pattern.leading_colon != path.leading_colon
|| path_pattern.segments.len() != path.segments.len()
{
return false;
}
// Boths parts are equal length
// Both parts are equal length
std::iter::zip(&path_pattern.segments, &path.segments).all(
|(path_pattern_segment, path_segment)| {
let ident_equal = path_pattern_segment.ident == path_segment.ident;
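To spell out the matching rule described in the doc comment of `path_pattern_matches_path` above: a pattern segment without path arguments matches any arguments on the other side, while a pattern segment with arguments must match them exactly. A rough standalone sketch with `syn` types (the function itself is private to the crate, so this only illustrates the documented intent, not a call into it):

```rust
use syn::{parse_quote, Type};

fn main() {
    // A bare `Vec` pattern carries no path arguments, so under the rule above
    // it matches `Vec<u32>`, `Vec<String>`, and so on.
    let bare_pattern: Type = parse_quote!(Vec);
    // A `Vec<u32>` pattern carries arguments, so it only matches `Vec<u32>`.
    let exact_pattern: Type = parse_quote!(Vec<u32>);
    let candidate: Type = parse_quote!(Vec<String>);
    // Hypothetical outcomes per the documented rule:
    //   matches(bare_pattern,  candidate) == true
    //   matches(exact_pattern, candidate) == false
    let _ = (bare_pattern, exact_pattern, candidate);
}
```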
2 changes: 1 addition & 1 deletion serde_with_macros/src/lib.rs
@@ -798,7 +798,7 @@ fn replace_infer_type_with_type(to_replace: Type, replacement: &Type) -> Type {
Type::Paren(inner)
}
Type::Path(mut inner) => {
if let Some(Pair::End(mut t) | Pair::Punctuated(mut t, _)) = inner.path.segments.pop() {
if let Some(Pair::End(mut t) | Pair::Punctuated(t, _)) = inner.path.segments.pop() {

Check failure on line 801 in serde_with_macros/src/lib.rs, reported by every GitHub Actions "Build and Test" job (ubuntu-latest: 1.64, 1.70, stable, beta, nightly) and "clippy_check" job (ubuntu-latest and windows-latest: stable, nightly):

variable `t` is bound inconsistently across alternatives separated by `|`

t.arguments = match t.arguments {
PathArguments::None => PathArguments::None,
PathArguments::AngleBracketed(mut inner) => {
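The check failures above come from the new `if let` binding `t` with `mut` in one alternative but not the other. As a hedged sketch only (a hypothetical helper, not the fix applied in this repository), one binding-consistent shape for such a pattern over `syn` types would be:

```rust
use syn::{punctuated::Pair, PathArguments, TypePath};

// Hypothetical helper, for illustration: bind `t` the same way in both
// alternatives of the or-pattern, then rebind it mutably before mutating it.
fn strip_last_segment_arguments(inner: &mut TypePath) {
    if let Some(Pair::End(t) | Pair::Punctuated(t, _)) = inner.path.segments.pop() {
        let mut t = t;
        t.arguments = PathArguments::None;
        inner.path.segments.push(t);
    }
}
```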
