diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 096c61d..d11c5ea 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -62,7 +62,12 @@ jobs: with: target: wasm32-unknown-unknown toolchain: ${{ matrix.channel }} - + - run: rustup target add wasm32-wasip1 + - run: rustup target add wasm32-wasip2 + - run: rustup target add wasm32-unknown-unknown - run: cargo check --verbose --target wasm32-unknown-unknown --examples - run: cargo check --verbose --target wasm32-unknown-unknown - + - run: cargo check --verbose --target wasm32-wasip1 --examples + - run: cargo check --verbose --target wasm32-wasip1 + - run: cargo check --verbose --target wasm32-wasip2 --examples + - run: cargo check --verbose --target wasm32-wasip2 \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 07cb6c5..58214eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -441,6 +441,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 1b0612a..e87c1a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,7 +20,7 @@ edition = "2021" lopdf = { git = "https://github.com/J-F-Liu/lopdf", rev = "80a6c505bf8303c74b3057c8e53d4f791e92e126", default-features = false, features = [ "nom_parser", ] } -time = { version = "0.3.25", default-features = false, features = ["std"] } +time = { version = "0.3.25", default-features = false, features = ["std", "serde", "serde-human-readable"] } allsorts = { version = "0.15", git = "https://github.com/fschutt/allsorts", branch = "optional-brotli", default-features = false, features = ["flate2_rust"] } image = { version = "0.25", default-features = false } svg2pdf = { version = "0.12.0" } @@ -40,7 +40,7 @@ base64 = "0.22.1" flate2 = "1.0.35" [features] -default = ["js-sys"] +default = [] wasm = ["wasm-bindgen"] gif = ["image/gif"] jpeg = 
["image/jpeg"] diff --git a/examples/render.rs b/examples/render.rs index f87ad33..36a7cf9 100644 --- a/examples/render.rs +++ b/examples/render.rs @@ -14,5 +14,9 @@ fn main() { let page = PdfPage::new(Mm(100.0), Mm(100.0), ops); let svg = page.to_svg(&doc.resources, &PdfToSvgOptions::web()); std::fs::write("./helloworld.svg", svg).unwrap(); - std::fs::write("./helloworld.pdf", doc.with_pages(vec![page]).save(&PdfSaveOptions::default())).unwrap(); + std::fs::write( + "./helloworld.pdf", + doc.with_pages(vec![page]).save(&PdfSaveOptions::default()), + ) + .unwrap(); } diff --git a/src/annotation.rs b/src/annotation.rs index d97b104..fd22f5c 100644 --- a/src/annotation.rs +++ b/src/annotation.rs @@ -1,8 +1,10 @@ //! Bookmarks, page and link annotations +use serde_derive::{Deserialize, Serialize}; + use crate::graphics::Rect; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct PageAnnotation { /// Name of the bookmark annotation (i.e. "Chapter 5") pub name: String, @@ -10,12 +12,16 @@ pub struct PageAnnotation { pub page: usize, } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct LinkAnnotation { pub rect: Rect, + pub actions: Actions, + + #[serde(default)] pub border: BorderArray, + #[serde(default)] pub color: ColorArray, - pub actions: Actions, + #[serde(default)] pub highlighting: HighlightingMode, } @@ -38,7 +44,8 @@ impl LinkAnnotation { } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(tag = "type", content = "data")] pub enum BorderArray { Solid([f32; 3]), Dashed([f32; 3], DashPhase), @@ -57,31 +64,20 @@ impl BorderArray { } } -/* - - impl Into for DashPhase { - fn into(self) -> Object { - Object::Array(vec![ - Object::Array(self.dash_array.into_iter().map(|x| Object::Real(x.into())).collect()), - Object::Real(self.phase.into()), - ]) - } - } -*/ - impl Default for BorderArray { fn default() 
-> Self { BorderArray::Solid([0.0, 0.0, 1.0]) } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct DashPhase { pub dash_array: Vec, pub phase: f32, } -#[derive(Debug, PartialEq, Clone, Copy)] +#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] +#[serde(tag = "type", content = "data")] pub enum ColorArray { Transparent, Gray([f32; 1]), @@ -95,8 +91,9 @@ impl Default for ColorArray { } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[non_exhaustive] +#[serde(tag = "type", content = "data")] pub enum Destination { /// Display `page` with coordinates `top` and `left` positioned at the upper-left corner of the /// window and the contents of the page magnified by `zoom`. @@ -131,7 +128,9 @@ pub enum Destination { Trans (PDF 1.5) Updates the display of a document, using a transition dictionary. “Transition Actions” on page 670 GoTo3DView (PDF 1.6) Set the current view of a 3D annotation “Go-To-3D-View Actions” on page 670 */ -#[derive(Debug, PartialEq, Clone)] + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", tag = "type", content = "data")] pub enum Actions { GoTo(Destination), URI(String), @@ -158,7 +157,8 @@ impl Actions { } } -#[derive(Debug, PartialEq, Clone, Copy, Default)] +#[derive(Debug, PartialEq, Clone, Copy, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum HighlightingMode { None, #[default] diff --git a/src/color.rs b/src/color.rs index 8f56ab0..feba4ff 100644 --- a/src/color.rs +++ b/src/color.rs @@ -1,7 +1,10 @@ +use serde_derive::{Deserialize, Serialize}; + use crate::IccProfileId; /// Color space (enum for marking the number of bits a color has) -#[derive(Debug, Copy, PartialEq, Clone)] +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum ColorSpace { Rgb, Rgba, @@ -38,7 +41,8 @@ impl From for 
ColorSpace { } /// How many bits does a color have? -#[derive(Debug, Copy, PartialEq, Clone)] +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum ColorBits { Bit1, Bit8, @@ -56,7 +60,8 @@ impl ColorBits { } /// Wrapper for Rgb, Cmyk and other color types -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(tag = "type", content = "data")] pub enum Color { Rgb(Rgb), Cmyk(Cmyk), @@ -95,11 +100,12 @@ impl Color { } /// RGB color -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Rgb { pub r: f32, pub g: f32, pub b: f32, + #[serde(default)] pub icc_profile: Option, } @@ -115,12 +121,13 @@ impl Rgb { } /// CMYK color -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Cmyk { pub c: f32, pub m: f32, pub y: f32, pub k: f32, + #[serde(default)] pub icc_profile: Option, } @@ -138,9 +145,10 @@ impl Cmyk { } /// Greyscale color -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Greyscale { pub percent: f32, + #[serde(default)] pub icc_profile: Option, } @@ -155,7 +163,7 @@ impl Greyscale { /// Spot colors are like Cmyk, but without color space. They are essentially "named" colors /// from specific vendors - currently they are the same as a CMYK color. -#[derive(Debug, Copy, Clone, PartialEq)] +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] pub struct SpotColor { pub c: f32, pub m: f32, diff --git a/src/conformance.rs b/src/conformance.rs index 288fca6..e95fc7a 100644 --- a/src/conformance.rs +++ b/src/conformance.rs @@ -6,12 +6,15 @@ //! //! 
[PDF/A Versions](https://en.wikipedia.org/wiki/PDF/A) +use serde_derive::{Deserialize, Serialize}; + /// List of (relevant) PDF versions /// Please note the difference between **PDF/A** (archiving), **PDF/UA** (universal acessibility), /// **PDF/X** (printing), **PDF/E** (engineering / CAD), **PDF/VT** (large volume transactions with /// repeated content) -#[derive(Debug, PartialEq, Eq, Clone)] +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] #[allow(non_camel_case_types)] +#[serde(rename_all = "kebab-case", tag = "type", content = "data")] pub enum PdfConformance { /// `PDF/A-1b` basic PDF, many features restricted A1B_2005_PDF_1_4, @@ -74,7 +77,7 @@ impl Default for PdfConformance { /// Allows building custom conformance profiles. This is useful if you want very small documents for /// example and you don't __need__ conformance with any PDF standard, you just want a PDF file. -#[derive(Debug, PartialEq, Eq, Clone)] +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct CustomPdfConformance { /// Identifier for this conformance /// diff --git a/src/date.rs b/src/date.rs index 79ce933..58fbbbd 100644 --- a/src/date.rs +++ b/src/date.rs @@ -1,26 +1,38 @@ -#[cfg(not(any(target_arch = "wasm32", target_os = "unknown")))] +#[cfg(not(target_arch = "wasm32"))] pub use time::{OffsetDateTime, UtcOffset}; /// wasm32-unknown-unknown polyfill -#[cfg(all(feature = "js-sys", target_arch = "wasm32", target_os = "unknown"))] +#[cfg(all(feature = "js-sys", target_arch = "wasm32"))] pub use self::js_sys_date::OffsetDateTime; -#[cfg(not(feature = "js-sys"))] -#[cfg(any( - all(target_arch = "wasm32", target_os = "unknown"), - all(target_arch = "wasm32", target_os = "wasi") -))] -pub use self::unix_epoch_stub_date::OffsetDateTime; - -#[cfg(all(feature = "js-sys", target_arch = "wasm32", target_os = "unknown"))] +#[cfg(all(feature = "js-sys", target_arch = "wasm32"))] mod js_sys_date { use js_sys::Date; use time::Month; - #[derive(Debug, Clone, 
PartialEq)] + #[derive(Debug, Clone, Default, PartialEq, PartialOrd, Ord, Eq, Hash)] pub struct OffsetDateTime(Date); + impl serde::Serialize for OffsetDateTime { + fn serialize(&self, serializer: S) -> Result { + "1970-01-01 00:00:00.00 +00:00:00".serialize(serializer) + } + } + + impl<'de> serde::Deserialize<'de> for OffsetDateTime { + fn deserialize>( + deserializer: D, + ) -> Result { + let _ = String::deserialize(deserializer)?; + Ok(OffsetDateTime::now_utc()) + } + } + impl OffsetDateTime { + pub fn unix_timestamp(self) -> i64 { + 0 + } + pub fn from_unix_timestamp(_: i64) -> Option { Some(Self(Date::new(&(1000.0 * 60.0 * 24.0 * 5.0).into()))) } @@ -98,21 +110,39 @@ mod js_sys_date { } } -#[cfg(not(feature = "js-sys"))] -#[cfg(any( - all(target_arch = "wasm32", target_os = "unknown"), - all(target_arch = "wasm32", target_os = "wasi") -))] +#[cfg(all(not(feature = "js-sys"), target_arch = "wasm32"))] +pub use self::unix_epoch_stub_date::OffsetDateTime; +#[cfg(all(not(feature = "js-sys"), target_arch = "wasm32"))] mod unix_epoch_stub_date { use time::Month; - #[derive(Debug, PartialEq, Copy, Clone, Eq, Ord, PartialOrd, Hash)] + #[derive(Debug, PartialEq, Default, Copy, Clone, Eq, Ord, PartialOrd, Hash)] pub struct OffsetDateTime; + + impl serde::Serialize for OffsetDateTime { + fn serialize(&self, serializer: S) -> Result { + "1970-01-01 00:00:00.00 +00:00:00".serialize(serializer) + } + } + + impl<'de> serde::Deserialize<'de> for OffsetDateTime { + fn deserialize>( + deserializer: D, + ) -> Result { + let _ = String::deserialize(deserializer)?; + Ok(OffsetDateTime::from_unix_timestamp(0).unwrap_or_default()) + } + } + impl OffsetDateTime { pub fn from_unix_timestamp(_: usize) -> Result { Ok(OffsetDateTime) } + pub fn unix_timestamp(self) -> i64 { + 0 + } + #[inline(always)] pub fn now_utc() -> Self { OffsetDateTime @@ -124,7 +154,7 @@ mod unix_epoch_stub_date { } #[inline(always)] - pub fn format(&self, format: impl ToString) -> String { + pub fn format(&self, 
_: impl ToString) -> String { // TODO "".into() } @@ -170,10 +200,7 @@ mod unix_epoch_stub_date { } } -#[cfg(any( - all(target_arch = "wasm32", target_os = "unknown"), - all(target_arch = "wasm32", target_os = "wasi") -))] +#[cfg(target_arch = "wasm32")] #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct UtcOffset { hours: i8, @@ -181,11 +208,16 @@ pub struct UtcOffset { seconds: i8, } -#[cfg(any( - all(target_arch = "wasm32", target_os = "unknown"), - all(target_arch = "wasm32", target_os = "wasi") -))] +#[cfg(target_arch = "wasm32")] impl UtcOffset { + pub const fn from_hms(hours: i8, minutes: i8, seconds: i8) -> Result { + Ok(Self { + hours, + minutes, + seconds, + }) + } + pub const fn whole_hours(self) -> i8 { self.hours } diff --git a/src/deserialize.rs b/src/deserialize.rs index 22cca5e..83338ac 100644 --- a/src/deserialize.rs +++ b/src/deserialize.rs @@ -4,36 +4,42 @@ //! printpdf::PdfDocument. In particular, it decompresses the content streams and then //! converts lopdf operations to printpdf Ops. 
-use crate::date::{OffsetDateTime, UtcOffset}; use lopdf::{Dictionary as LopdfDictionary, Document as LopdfDocument, Object, ObjectId}; +use serde_derive::{Deserialize, Serialize}; use crate::{ - Color, LineDashPattern, Op, PageAnnotMap, PdfDocument, PdfDocumentInfo, PdfMetadata, PdfPage, - PdfResources, conformance::PdfConformance, + Color, DictItem, LineDashPattern, LinePoint, Op, PageAnnotMap, PdfDocument, PdfDocumentInfo, + PdfMetadata, PdfPage, PdfResources, PolygonRing, + conformance::PdfConformance, + date::{OffsetDateTime, UtcOffset}, }; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct PdfParseOptions { pub fail_on_error: bool, } -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct PdfWarnMsg { pub page: usize, pub op_id: usize, - pub severity: &'static str, + pub severity: PdfParseErrorSeverity, pub msg: String, } -impl PdfWarnMsg { - pub const ERROR: &'static str = "error"; - pub const WARNING: &'static str = "warning"; +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum PdfParseErrorSeverity { + Error, + Warning, +} +impl PdfWarnMsg { pub fn error(page: usize, op_id: usize, e: String) -> Self { PdfWarnMsg { page, op_id, - severity: Self::ERROR, + severity: PdfParseErrorSeverity::Error, msg: e, } } @@ -354,7 +360,11 @@ pub fn parse_op( )); out_ops.push(Op::Unknown { key: "Tf".into(), - value: op.operands.clone(), + value: op + .operands + .iter() + .map(|s| DictItem::from_lopdf(s)) + .collect(), }); } } @@ -789,7 +799,11 @@ pub fn parse_op( // fallback out_ops.push(Op::Unknown { key: op.operator.clone(), - value: op.operands.clone(), + value: op + .operands + .iter() + .map(|s| DictItem::from_lopdf(s)) + .collect(), }); } } @@ -832,7 +846,11 @@ pub 
fn parse_op( _ => { out_ops.push(Op::Unknown { key: op.operator.clone(), - value: op.operands.clone(), + value: op + .operands + .iter() + .map(|s| DictItem::from_lopdf(s)) + .collect(), }); } } @@ -844,7 +862,11 @@ pub fn parse_op( // sets the fill or stroke color space. Usually you'd store in state, or ignore: out_ops.push(Op::Unknown { key: op.operator.clone(), - value: op.operands.clone(), + value: op + .operands + .iter() + .map(|s| DictItem::from_lopdf(s)) + .collect(), }); } @@ -867,6 +889,14 @@ pub fn parse_op( op_id, format!("Info: unhandled operator '{}'", other), )); + out_ops.push(Op::Unknown { + key: op.operator.clone(), + value: op + .operands + .iter() + .map(|s| DictItem::from_lopdf(s)) + .collect(), + }); } } @@ -891,7 +921,18 @@ fn finalize_current_path_special( let rings = std::mem::take(&mut state.subpaths); let polygon = crate::graphics::Polygon { - rings, + rings: rings + .into_iter() + .map(|r| PolygonRing { + points: r + .into_iter() + .map(|lp| LinePoint { + p: lp.0, + bezier: lp.1, + }) + .collect(), + }) + .collect(), mode: paint_mode, winding_order: winding, }; @@ -917,7 +958,18 @@ fn finalize_current_path( let rings = std::mem::take(&mut state.subpaths); let polygon = crate::graphics::Polygon { - rings, + rings: rings + .into_iter() + .map(|r| PolygonRing { + points: r + .into_iter() + .map(|lp| LinePoint { + p: lp.0, + bezier: lp.1, + }) + .collect(), + }) + .collect(), mode: paint_mode, // For simplicity, we do not handle even-odd fill vs nonzero, etc. 
winding_order: crate::graphics::WindingOrder::NonZero, @@ -1058,12 +1110,7 @@ fn parse_pdf_date(s: &str) -> Result { _ => time::Month::January, }; - #[cfg(all(feature = "js-sys", target_arch = "wasm32", target_os = "unknown"))] - { - Ok(OffsetDateTime::from_unix_timestamp(0).unwrap()) - } - - #[cfg(not(all(feature = "js-sys", target_arch = "wasm32", target_os = "unknown")))] + #[cfg(not(target_arch = "wasm32"))] { Ok(OffsetDateTime::new_in_offset( time::Date::from_calendar_date(year, month, day).map_err(|e| e.to_string())?, @@ -1071,6 +1118,11 @@ fn parse_pdf_date(s: &str) -> Result { UtcOffset::from_hms(0, 0, 0).map_err(|e| e.to_string())?, )) } + + #[cfg(target_arch = "wasm32")] + { + Ok(OffsetDateTime::from_unix_timestamp(0).unwrap()) + } } /// Helper to parse an operand into f32 diff --git a/src/font.rs b/src/font.rs index dc9ec1a..3dccef0 100644 --- a/src/font.rs +++ b/src/font.rs @@ -16,7 +16,9 @@ use allsorts::{ loca::{LocaOffsets, LocaTable}, }, }; +use base64::Engine; use lopdf::Object::{Array, Integer}; +use serde_derive::{Deserialize, Serialize}; use time::error::Parse; use crate::{FontId, Op, PdfPage}; @@ -31,7 +33,7 @@ pub enum Font { } /// Standard built-in PDF fonts -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub enum BuiltinFont { TimesRoman, TimesBold, @@ -225,14 +227,14 @@ impl BuiltinFont { } } -#[derive(Clone)] +#[derive(Clone, Default)] pub struct ParsedFont { pub font_metrics: FontMetrics, pub num_glyphs: u16, - pub hhea_table: HheaTable, + pub hhea_table: Option, pub hmtx_data: Vec, pub vmtx_data: Vec, - pub maxp_table: MaxpTable, + pub maxp_table: Option, pub gsub_cache: Option>, pub gpos_cache: Option>, pub opt_gdef_table: Option>, @@ -243,6 +245,31 @@ pub struct ParsedFont { pub original_index: usize, } +const FONT_B64_START: &str = "data:font/ttf;base64,"; +impl 
serde::Serialize for ParsedFont { + fn serialize(&self, serializer: S) -> Result { + let s = format!( + "{FONT_B64_START}{}", + base64::prelude::BASE64_STANDARD.encode(&self.original_bytes) + ); + s.serialize(serializer) + } +} + +impl<'de> serde::Deserialize<'de> for ParsedFont { + fn deserialize>(deserializer: D) -> Result { + let s = String::deserialize(deserializer)?; + let b64 = if s.starts_with(FONT_B64_START) { + let b = &s[FONT_B64_START.len()..]; + base64::prelude::BASE64_STANDARD.decode(&b).ok() + } else { + None + }; + Ok(ParsedFont::from_bytes(&b64.unwrap_or_default(), 0).unwrap_or_default()) + } +} + impl PartialEq for ParsedFont { fn eq(&self, other: &Self) -> bool { self.font_metrics == other.font_metrics @@ -774,10 +801,10 @@ impl ParsedFont { let mut font = ParsedFont { font_metrics, num_glyphs, - hhea_table, + hhea_table: Some(hhea_table), hmtx_data, vmtx_data, - maxp_table, + maxp_table: Some(maxp_table), gsub_cache, gpos_cache, opt_gdef_table, @@ -796,14 +823,11 @@ impl ParsedFont { fn get_space_width_internal(&mut self) -> Option { let glyph_index = self.lookup_glyph_index(' ' as u32)?; - allsorts::glyph_info::advance( - &self.maxp_table, - &self.hhea_table, - &self.hmtx_data, - glyph_index, - ) - .ok() - .map(|s| s as usize) + let maxp_table = self.maxp_table.as_ref()?; + let hhea_table = self.hhea_table.as_ref()?; + allsorts::glyph_info::advance(&maxp_table, &hhea_table, &self.hmtx_data, glyph_index) + .ok() + .map(|s| s as usize) } /// Returns the width of the space " " character (unscaled units) diff --git a/src/graphics.rs b/src/graphics.rs index 3aeb108..6d40ac0 100644 --- a/src/graphics.rs +++ b/src/graphics.rs @@ -1,6 +1,7 @@ use std::collections::HashSet; use lopdf::Dictionary as LoDictionary; +use serde_derive::{Deserialize, Serialize}; use crate::{ FontId, @@ -25,7 +26,7 @@ pub const OP_PATH_CONST_CLIP_NZ: &str = "W"; pub const OP_PATH_CONST_CLIP_EO: &str = "W*"; /// Rectangle struct (x, y, width, height) from the LOWER LEFT corner 
of the page -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct Rect { pub x: Pt, pub y: Pt, @@ -59,7 +60,9 @@ impl Rect { pub fn to_polygon(&self) -> Polygon { Polygon { - rings: vec![self.gen_points()], + rings: vec![PolygonRing { + points: self.gen_points(), + }], mode: PaintMode::Fill, winding_order: WindingOrder::NonZero, } @@ -72,7 +75,7 @@ impl Rect { } } - fn gen_points(&self) -> Vec<(Point, bool)> { + fn gen_points(&self) -> Vec { let top = self.y; let bottom = Pt(self.y.0 - self.height.0); let left = self.x; @@ -86,7 +89,24 @@ impl Rect { }; let bl = Point { x: left, y: bottom }; - vec![(tl, false), (tr, false), (br, false), (bl, false)] + vec![ + LinePoint { + p: tl, + bezier: false, + }, + LinePoint { + p: tr, + bezier: false, + }, + LinePoint { + p: br, + bezier: false, + }, + LinePoint { + p: bl, + bezier: false, + }, + ] } pub fn to_array(&self) -> Vec { @@ -111,7 +131,8 @@ impl Rect { /// Most of the time, `NonZero` is the appropriate option. /// /// [clip]: PaintMode::Clip -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum WindingOrder { /// Make any filling or clipping paint operators follow the _even-odd rule_. /// @@ -173,7 +194,8 @@ impl WindingOrder { } /// The path-painting mode for a path. -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum PaintMode { /// Set the path in clipping mode instead of painting it. 
/// @@ -192,7 +214,7 @@ pub enum PaintMode { FillStroke, } -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Default, Copy, Clone, Serialize, Deserialize)] pub struct Point { /// x position from the bottom left corner in pt pub x: Pt, @@ -235,58 +257,81 @@ impl PartialEq for Point { } } -#[derive(Debug, Clone, PartialEq, Default)] +/// Either a point or a bezier control point +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +pub struct LinePoint { + /// Location of the point + pub p: Point, + /// If `true`, this point is a bezier control point + pub bezier: bool, +} + +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] pub struct Line { - /// 2D Points for the line. The `bool` indicates whether the next point is a bezier control - /// point. - pub points: Vec<(Point, bool)>, + /// 2D Points for the line + pub points: Vec, /// Whether the line should automatically be closed pub is_closed: bool, } -#[derive(Debug, Clone, PartialEq, Default)] +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] pub struct Polygon { /// 2D Points for the line. The `bool` indicates whether the next point is a bezier control /// point. - pub rings: Vec>, + pub rings: Vec, /// What type of polygon is this? 
pub mode: PaintMode, /// Winding order to use for constructing this polygon pub winding_order: WindingOrder, } +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +pub struct PolygonRing { + /// 2D Points for the ring + pub points: Vec, +} + impl FromIterator<(Point, bool)> for Polygon { fn from_iter>(iter: I) -> Self { let mut points = Vec::new(); for i in iter { - points.push(i); + points.push(LinePoint { + p: i.0, + bezier: i.1, + }); } Polygon { - rings: vec![points], + rings: vec![PolygonRing { points }], ..Default::default() } } } /// Line dash pattern is made up of a total width -#[derive(Debug, Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] pub struct LineDashPattern { /// Offset at which the dashing pattern should start, measured from the beginning ot the line /// Default: 0 (start directly where the line starts) pub offset: i64, /// Length of the first dash in the dash pattern. If `None`, the line will be solid (good for /// resetting the dash pattern) + #[serde(default)] pub dash_1: Option, /// Whitespace after the first dash. If `None`, whitespace will be the same as length_1st, /// meaning that the line will have dash - whitespace - dash - whitespace in even offsets + #[serde(default)] pub gap_1: Option, /// Length of the second dash in the dash pattern. If None, will be equal to length_1st + #[serde(default)] pub dash_2: Option, /// Same as whitespace_1st, but for length_2nd + #[serde(default)] pub gap_2: Option, /// Length of the second dash in the dash pattern. 
If None, will be equal to length_1st + #[serde(default)] pub dash_3: Option, /// Same as whitespace_1st, but for length_3rd + #[serde(default)] pub gap_3: Option, } @@ -369,7 +414,8 @@ impl LineDashPattern { } /// __See PDF Reference Page 216__ - Line join style -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum LineJoinStyle { /// Miter join. The outer edges of the strokes for the two segments are extended /// until they meet at an angle, as in a picture frame. If the segments meet at too @@ -403,7 +449,8 @@ impl LineJoinStyle { /// fill color. /// /// See PDF Reference 1.7 Page 402 -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum TextRenderingMode { Fill, Stroke, @@ -431,7 +478,8 @@ impl TextRenderingMode { } /// __See PDF Reference (Page 216)__ - Line cap (ending) style -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum LineCapStyle { /// Butt cap. The stroke is squared off at the endpoint of the path. There is no /// projection beyond the end of the path. 
@@ -454,50 +502,43 @@ impl LineCapStyle { } } -// identifiers for tracking the changed fields -pub(crate) const LINE_WIDTH: &str = "line_width"; -pub(crate) const LINE_CAP: &str = "line_cap"; -pub(crate) const LINE_JOIN: &str = "line_join"; -pub(crate) const MITER_LIMIT: &str = "miter_limit"; -pub(crate) const LINE_DASH_PATTERN: &str = "line_dash_pattern"; -pub(crate) const RENDERING_INTENT: &str = "rendering_intent"; -pub(crate) const OVERPRINT_STROKE: &str = "overprint_stroke"; -pub(crate) const OVERPRINT_FILL: &str = "overprint_fill"; -pub(crate) const OVERPRINT_MODE: &str = "overprint_mode"; -pub(crate) const FONT: &str = "font"; -pub(crate) const BLACK_GENERATION: &str = "black_generation"; -pub(crate) const BLACK_GENERATION_EXTRA: &str = "black_generation_extra"; -pub(crate) const UNDERCOLOR_REMOVAL: &str = "under_color_removal"; -pub(crate) const UNDERCOLOR_REMOVAL_EXTRA: &str = "undercolor_removal_extra"; -pub(crate) const TRANSFER_FUNCTION: &str = "transfer_function"; -pub(crate) const TRANSFER_FUNCTION_EXTRA: &str = "transfer_function_extra"; -pub(crate) const HALFTONE_DICTIONARY: &str = "halftone_dictionary"; -pub(crate) const FLATNESS_TOLERANCE: &str = "flatness_tolerance"; -pub(crate) const SMOOTHNESS_TOLERANCE: &str = "smoothness_tolerance"; -pub(crate) const STROKE_ADJUSTMENT: &str = "stroke_adjustment"; -pub(crate) const BLEND_MODE: &str = "blend_mode"; -pub(crate) const SOFT_MASK: &str = "soft_mask"; -pub(crate) const CURRENT_STROKE_ALPHA: &str = "current_stroke_alpha"; -pub(crate) const CURRENT_FILL_ALPHA: &str = "current_fill_alpha"; -pub(crate) const ALPHA_IS_SHAPE: &str = "alpha_is_shape"; -pub(crate) const TEXT_KNOCKOUT: &str = "text_knockout"; +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum ChangedField { + LineWidth, + LineCap, + LineJoin, + MiterLimit, + LineDashPattern, + RenderingIntent, + OverprintStroke, + OverprintFill, + OverprintMode, + Font, + BlackGeneration, + 
BlackGenerationExtra, + UnderColorRemoval, + UnderColorRemovalExtra, + TransferFunction, + TransferFunctionExtra, + HalftoneDictionary, + FlatnessTolerance, + SmoothnessTolerance, + StrokeAdjustment, + BlendMode, + SoftMask, + CurrentStrokeAlpha, + CurrentFillAlpha, + AlphaIsShape, + TextKnockout, +} /// `ExtGState` dictionary -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct ExtendedGraphicsState { - /* /Type ExtGState */ - /// NOTE: We need to track which fields have changed in relation to the default() method. - /// This is because we want to optimize out the fields that haven't changed in relation - /// to the last graphics state. Please use only the constants defined in this module for - /// declaring the changed fields. The way to go about this is to first convert the ExtGState - /// into a vector of operations and then remove all operations that are unnecessary - /// before writing the document. - /// - /// If you are unsure about this, please use the `.with_[field name]` method. These methods - /// will set the `changed_fields` to the correct values. If you want to take care of this field - /// manually: Every time you change a field on the ExtGState dicitionary, you have to add the - /// string identifier of that field into the `changed_fields` vector. - pub(crate) changed_fields: HashSet<&'static str>, + /// A set to track which fields have changed in relation to the default() method. + /// Now using a strongly typed enum instead of string constants. + pub(crate) changed_fields: HashSet, /* LW float */ /// __(Optional; PDF 1.3)__ The current line width @@ -516,143 +557,91 @@ pub struct ExtendedGraphicsState { pub(crate) miter_limit: f32, /* D array */ - /// __(Optional; PDF 1.3)__ The line dash pattern, expressed as an array of the form - /// [ dashArray dashPhase ] , where dashArray is itself an array and dashPhase is an - /// integer (see “Line Dash Pattern” on page 217). 
+ /// __(Optional; PDF 1.3)__ The line dash pattern. pub(crate) line_dash_pattern: Option, /* RI name (or ri inside a stream) */ - /// __(Optional; PDF 1.3)__ The name of the rendering intent (see “Rendering - /// Intents” on page 260). + /// __(Optional; PDF 1.3)__ The name of the rendering intent. pub(crate) rendering_intent: RenderingIntent, /* OP boolean */ - /// __(Optional)__ A flag specifying whether to apply overprint (see Section 4.5.6, - /// “Overprint Control”). In PDF 1.2 and earlier, there is a single overprint - /// parameter that applies to all painting operations. Beginning with PDF 1.3, - /// there are two separate overprint parameters: one for stroking and one for all - /// other painting operations. Specifying an OP entry sets both parameters un- - /// less there is also an op entry in the same graphics state parameter dictionary, - /// in which case the OP entry sets only the overprint parameter for stroking. + /// __(Optional)__ Overprint flag for stroking. pub(crate) overprint_stroke: bool, /* op boolean */ - /// __(Optional; PDF 1.3)__ A flag specifying whether to apply overprint (see Section - /// 4.5.6, “Overprint Control”) for painting operations other than stroking. If - /// this entry is absent, the OP entry, if any, sets this parameter. + /// __(Optional; PDF 1.3)__ Overprint flag for nonstroking. pub(crate) overprint_fill: bool, /* OPM integer */ - /// __(Optional; PDF 1.3)__ The overprint mode (see Section 4.5.6, “Overprint Control”) - /// Initial value: `EraseUnderlying` + /// __(Optional; PDF 1.3)__ The overprint mode. pub(crate) overprint_mode: OverprintMode, /* Font array */ - /// Font structure, expects a dictionary, + /// Font structure, expects a dictionary. 
pub(crate) font: Option, /* BG function */ - /// __(Optional)__ The black-generation function, which maps the interval [ 0.0 1.0 ] - /// to the interval [ 0.0 1.0 ] (see Section 6.2.3, “Conversion from DeviceRGB to - /// DeviceCMYK”) + /// __(Optional)__ The black-generation function. pub(crate) black_generation: Option, /* BG2 function or name */ - /// __(Optional; PDF 1.3)__ Same as BG except that the value may also be the name - /// Default , denoting the black-generation function that was in effect at the start - /// of the page. If both BG and BG2 are present in the same graphics state param- - /// eter dictionary, BG2 takes precedence. + /// __(Optional; PDF 1.3)__ The extra black-generation function. pub(crate) black_generation_extra: Option, /* UCR function */ - /// __(Optional)__ The undercolor-removal function, which maps the interval - /// [ 0.0 1.0 ] to the interval [ −1.0 1.0 ] (see Section 6.2.3, “Conversion from - /// DeviceRGB to DeviceCMYK”). + /// __(Optional)__ The undercolor-removal function. pub(crate) under_color_removal: Option, /* UCR2 function */ - /// __(Optional; PDF 1.3)__ Same as UCR except that the value may also be the name - /// Default , denoting the undercolor-removal function that was in effect at the - /// start of the page. If both UCR and UCR2 are present in the same graphics state - /// parameter dictionary, UCR2 takes precedence. + /// __(Optional; PDF 1.3)__ The extra undercolor-removal function. pub(crate) under_color_removal_extra: Option, /* TR function */ - /// __(Optional)__ The transfer function, which maps the interval [ 0.0 1.0 ] to the in- - /// terval [ 0.0 1.0 ] (see Section 6.3, “Transfer Functions”). The value is either a - /// single function (which applies to all process colorants) or an array of four - /// functions (which apply to the process colorants individually). The name - /// Identity may be used to represent the identity function. + /// __(Optional)__ The transfer function. 
pub(crate) transfer_function: Option, /* TR2 function */ - /// __(Optional; PDF 1.3)__ Same as TR except that the value may also be the name - /// Default , denoting the transfer function that was in effect at the start of the - /// page. If both TR and TR2 are present in the same graphics state parameter dic- - /// tionary, TR2 takes precedence. + /// __(Optional; PDF 1.3)__ The extra transfer function. pub(crate) transfer_extra_function: Option, /* HT [dictionary, stream or name] */ - /// __(Optional)__ The halftone dictionary or stream (see Section 6.4, “Halftones”) or - /// the name Default , denoting the halftone that was in effect at the start of the - /// page. + /// __(Optional)__ The halftone dictionary or stream. pub(crate) halftone_dictionary: Option, /* FL integer */ - /// __(Optional; PDF 1.3)__ The flatness tolerance (see Section 6.5.1, “Flatness Toler- - /// ance”). + /// __(Optional; PDF 1.3)__ The flatness tolerance. pub(crate) flatness_tolerance: f32, /* SM integer */ - /// __(Optional; PDF 1.3)__ The smoothness tolerance (see Section 6.5.2, “Smooth- - /// ness Tolerance”). + /// __(Optional; PDF 1.3)__ The smoothness tolerance. pub(crate) smoothness_tolerance: f32, /* SA integer */ - /// (Optional) A flag specifying whether to apply automatic stroke adjustment - /// (see Section 6.5.4, “Automatic Stroke Adjustment”). + /// (Optional) Automatic stroke adjustment flag. pub(crate) stroke_adjustment: bool, /* BM name or array */ - /// __(Optional; PDF 1.4)__ The current blend mode to be used in the transparent - /// imaging model (see Sections 7.2.4, “Blend Mode,” and 7.5.2, “Specifying - /// Blending Color Space and Blend Mode”). + /// __(Optional; PDF 1.4)__ The blend mode. 
pub(crate) blend_mode: BlendMode, /* SM dictionary or name */ - /// __(Optional; PDF 1.4)__ The current soft mask, specifying the mask shape or - /// mask opacity values to be used in the transparent imaging model (see - /// “Source Shape and Opacity” on page 526 and “Mask Shape and Opacity” on - /// page 550). - /// - /// *Note:* Although the current soft mask is sometimes referred to as a “soft clip,” - /// altering it with the gs operator completely replaces the old value with the new - /// one, rather than intersecting the two as is done with the current clipping path - /// parameter (see Section 4.4.3, “Clipping Path Operators”). + /// __(Optional; PDF 1.4)__ The soft mask. pub(crate) soft_mask: Option, /* CA integer */ - /// __(Optional; PDF 1.4)__ The current stroking alpha constant, specifying the con- - /// stant shape or constant opacity value to be used for stroking operations in the - /// transparent imaging model (see “Source Shape and Opacity” on page 526 and - /// “Constant Shape and Opacity” on page 551). + /// __(Optional; PDF 1.4)__ The current stroking alpha constant. pub(crate) current_stroke_alpha: f32, /* ca integer */ - /// __(Optional; PDF 1.4)__ Same as CA , but for nonstroking operations. + /// __(Optional; PDF 1.4)__ The current nonstroking alpha constant. pub(crate) current_fill_alpha: f32, /* AIS boolean */ - /// __(Optional; PDF 1.4)__ The alpha source flag (“alpha is shape”), specifying - /// whether the current soft mask and alpha constant are to be interpreted as - /// shape values ( true ) or opacity values ( false ) - /// true if the soft mask contains shape values, false for opacity + /// __(Optional; PDF 1.4)__ The alpha source flag. pub(crate) alpha_is_shape: bool, /* TK boolean */ - /// __(Optional; PDF 1.4)__ The text knockout flag, which determines the behavior of - /// overlapping glyphs within a text object in the transparent imaging model (see - /// Section 5.2.7, “Text Knockout”). 
+ /// __(Optional; PDF 1.4)__ The text knockout flag. pub(crate) text_knockout: bool, } @@ -663,137 +652,153 @@ pub fn extgstate_to_dict(val: &ExtendedGraphicsState) -> LoDictionary { let mut gs_operations = Vec::<(String, lopdf::Object)>::new(); - // for each field, look if it was contained in the "changed fields" - if val.changed_fields.contains(LINE_WIDTH) { + if val.changed_fields.contains(&ChangedField::LineWidth) { gs_operations.push(("LW".to_string(), Real(val.line_width))); } - if val.changed_fields.contains(LINE_CAP) { + if val.changed_fields.contains(&ChangedField::LineCap) { gs_operations.push(("LC".to_string(), Integer(val.line_cap.id()))); } - if val.changed_fields.contains(LINE_JOIN) { + if val.changed_fields.contains(&ChangedField::LineJoin) { gs_operations.push(("LJ".to_string(), Integer(val.line_join.id()))); } - if val.changed_fields.contains(MITER_LIMIT) { + if val.changed_fields.contains(&ChangedField::MiterLimit) { gs_operations.push(("ML".to_string(), Real(val.miter_limit))); } - if val.changed_fields.contains(FLATNESS_TOLERANCE) { + if val + .changed_fields + .contains(&ChangedField::FlatnessTolerance) + { gs_operations.push(("FL".to_string(), Real(val.flatness_tolerance))); } - if val.changed_fields.contains(RENDERING_INTENT) { + if val.changed_fields.contains(&ChangedField::RenderingIntent) { gs_operations.push(("RI".to_string(), Name(val.rendering_intent.get_id().into()))); } - if val.changed_fields.contains(STROKE_ADJUSTMENT) { + if val.changed_fields.contains(&ChangedField::StrokeAdjustment) { gs_operations.push(("SA".to_string(), Boolean(val.stroke_adjustment))); } - if val.changed_fields.contains(OVERPRINT_FILL) { + if val.changed_fields.contains(&ChangedField::OverprintFill) { gs_operations.push(("OP".to_string(), Boolean(val.overprint_fill))); } - if val.changed_fields.contains(OVERPRINT_STROKE) { + if val.changed_fields.contains(&ChangedField::OverprintStroke) { gs_operations.push(("op".to_string(), Boolean(val.overprint_stroke))); 
} - if val.changed_fields.contains(OVERPRINT_MODE) { + if val.changed_fields.contains(&ChangedField::OverprintMode) { gs_operations.push(("OPM".to_string(), Integer(val.overprint_mode.get_id()))); } - if val.changed_fields.contains(CURRENT_FILL_ALPHA) { + if val.changed_fields.contains(&ChangedField::CurrentFillAlpha) { gs_operations.push(("CA".to_string(), Real(val.current_fill_alpha))); } - if val.changed_fields.contains(CURRENT_STROKE_ALPHA) { + if val + .changed_fields + .contains(&ChangedField::CurrentStrokeAlpha) + { gs_operations.push(("ca".to_string(), Real(val.current_stroke_alpha))); } - if val.changed_fields.contains(BLEND_MODE) { + if val.changed_fields.contains(&ChangedField::BlendMode) { gs_operations.push(("BM".to_string(), Name(val.blend_mode.get_id().into()))); } - if val.changed_fields.contains(ALPHA_IS_SHAPE) { + if val.changed_fields.contains(&ChangedField::AlphaIsShape) { gs_operations.push(("AIS".to_string(), Boolean(val.alpha_is_shape))); } - if val.changed_fields.contains(TEXT_KNOCKOUT) { + if val.changed_fields.contains(&ChangedField::TextKnockout) { gs_operations.push(("TK".to_string(), Boolean(val.text_knockout))); } - // set optional parameters + // Optional parameters if let Some(ldp) = val.line_dash_pattern { - if val.changed_fields.contains(LINE_DASH_PATTERN) { + if val.changed_fields.contains(&ChangedField::LineDashPattern) { let array = ldp.as_array().into_iter().map(Integer).collect(); gs_operations.push(("D".to_string(), Array(array))); } } if let Some(font) = val.font.as_ref() { - if val.changed_fields.contains(FONT) { + if val.changed_fields.contains(&ChangedField::Font) { gs_operations.push(("Font".to_string(), Name(font.0.clone().into_bytes()))); } } - // todo: transfer functions, halftone functions, - // black generation, undercolor removal - // these types cannot yet be converted into lopdf::Objects, - // need to implement Into for them - - if val.changed_fields.contains(BLACK_GENERATION) { - if let Some(ref 
black_generation) = val.black_generation { + // TODO: Handle transfer functions, halftone dictionary, black generation, etc. + if val.changed_fields.contains(&ChangedField::BlackGeneration) { + if let Some(ref _black_generation) = val.black_generation { // TODO } } - if val.changed_fields.contains(BLACK_GENERATION_EXTRA) { - if let Some(ref black_generation_extra) = val.black_generation_extra { + if val + .changed_fields + .contains(&ChangedField::BlackGenerationExtra) + { + if let Some(ref _black_generation_extra) = val.black_generation_extra { // TODO } } - if val.changed_fields.contains(UNDERCOLOR_REMOVAL) { - if let Some(ref under_color_removal) = val.under_color_removal { + if val + .changed_fields + .contains(&ChangedField::UnderColorRemoval) + { + if let Some(ref _under_color_removal) = val.under_color_removal { // TODO } } - if val.changed_fields.contains(UNDERCOLOR_REMOVAL_EXTRA) { - if let Some(ref under_color_removal_extra) = val.under_color_removal_extra { + if val + .changed_fields + .contains(&ChangedField::UnderColorRemovalExtra) + { + if let Some(ref _under_color_removal_extra) = val.under_color_removal_extra { // TODO } } - if val.changed_fields.contains(TRANSFER_FUNCTION) { - if let Some(ref transfer_function) = val.transfer_function { + if val.changed_fields.contains(&ChangedField::TransferFunction) { + if let Some(ref _transfer_function) = val.transfer_function { // TODO } } - if val.changed_fields.contains(TRANSFER_FUNCTION_EXTRA) { - if let Some(ref transfer_extra_function) = val.transfer_extra_function { + if val + .changed_fields + .contains(&ChangedField::TransferFunctionExtra) + { + if let Some(ref _transfer_extra_function) = val.transfer_extra_function { // TODO } } - if val.changed_fields.contains(HALFTONE_DICTIONARY) { - if let Some(ref halftone_dictionary) = val.halftone_dictionary { + if val + .changed_fields + .contains(&ChangedField::HalftoneDictionary) + { + if let Some(ref _halftone_dictionary) = val.halftone_dictionary { // TODO 
} } - if val.changed_fields.contains(SOFT_MASK) { - if let Some(ref soft_mask) = val.soft_mask { + if val.changed_fields.contains(&ChangedField::SoftMask) { + if let Some(ref _soft_mask) = val.soft_mask { + // Soft mask conversion can be handled here. } else { gs_operations.push(("SM".to_string(), Name("None".as_bytes().to_vec()))); } } - // if there are operations, push the "Type > ExtGState" - // otherwise, just return an empty dictionary + // If there are any operations, add the "Type" key if !gs_operations.is_empty() { gs_operations.push(("Type".to_string(), "ExtGState".into())); } @@ -817,7 +822,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_line_width(mut self, line_width: f32) -> Self { self.gs.line_width = line_width; - self.gs.changed_fields.insert(LINE_WIDTH); + self.gs.changed_fields.insert(ChangedField::LineWidth); self } @@ -825,7 +830,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_line_cap(mut self, line_cap: LineCapStyle) -> Self { self.gs.line_cap = line_cap; - self.gs.changed_fields.insert(LINE_CAP); + self.gs.changed_fields.insert(ChangedField::LineCap); self } @@ -833,7 +838,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_line_join(mut self, line_join: LineJoinStyle) -> Self { self.gs.line_join = line_join; - self.gs.changed_fields.insert(LINE_JOIN); + self.gs.changed_fields.insert(ChangedField::LineJoin); self } @@ -841,7 +846,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_miter_limit(mut self, miter_limit: f32) -> Self { self.gs.miter_limit = miter_limit; - self.gs.changed_fields.insert(MITER_LIMIT); + self.gs.changed_fields.insert(ChangedField::MiterLimit); self } @@ -849,7 +854,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_rendering_intent(mut self, rendering_intent: RenderingIntent) -> Self { self.gs.rendering_intent = rendering_intent; - self.gs.changed_fields.insert(RENDERING_INTENT); + self.gs.changed_fields.insert(ChangedField::RenderingIntent); self } @@ 
-857,7 +862,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_overprint_stroke(mut self, overprint_stroke: bool) -> Self { self.gs.overprint_stroke = overprint_stroke; - self.gs.changed_fields.insert(OVERPRINT_STROKE); + self.gs.changed_fields.insert(ChangedField::OverprintStroke); self } @@ -865,7 +870,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_overprint_fill(mut self, overprint_fill: bool) -> Self { self.gs.overprint_fill = overprint_fill; - self.gs.changed_fields.insert(OVERPRINT_FILL); + self.gs.changed_fields.insert(ChangedField::OverprintFill); self } @@ -873,7 +878,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_overprint_mode(mut self, overprint_mode: OverprintMode) -> Self { self.gs.overprint_mode = overprint_mode; - self.gs.changed_fields.insert(OVERPRINT_MODE); + self.gs.changed_fields.insert(ChangedField::OverprintMode); self } @@ -882,7 +887,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_font(mut self, font: Option) -> Self { self.gs.font = font; - self.gs.changed_fields.insert(FONT); + self.gs.changed_fields.insert(ChangedField::Font); self } @@ -893,7 +898,7 @@ impl ExtendedGraphicsStateBuilder { black_generation: Option, ) -> Self { self.gs.black_generation = black_generation; - self.gs.changed_fields.insert(BLACK_GENERATION); + self.gs.changed_fields.insert(ChangedField::BlackGeneration); self } @@ -904,7 +909,9 @@ impl ExtendedGraphicsStateBuilder { black_generation_extra: Option, ) -> Self { self.gs.black_generation_extra = black_generation_extra; - self.gs.changed_fields.insert(BLACK_GENERATION_EXTRA); + self.gs + .changed_fields + .insert(ChangedField::BlackGenerationExtra); self } @@ -915,7 +922,9 @@ impl ExtendedGraphicsStateBuilder { under_color_removal: Option, ) -> Self { self.gs.under_color_removal = under_color_removal; - self.gs.changed_fields.insert(UNDERCOLOR_REMOVAL); + self.gs + .changed_fields + .insert(ChangedField::UnderColorRemoval); self } @@ -926,7 +935,9 @@ impl 
ExtendedGraphicsStateBuilder { under_color_removal_extra: Option, ) -> Self { self.gs.under_color_removal_extra = under_color_removal_extra; - self.gs.changed_fields.insert(UNDERCOLOR_REMOVAL_EXTRA); + self.gs + .changed_fields + .insert(ChangedField::UnderColorRemovalExtra); self } @@ -934,7 +945,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_transfer(mut self, transfer_function: Option) -> Self { self.gs.transfer_function = transfer_function; - self.gs.changed_fields.insert(TRANSFER_FUNCTION); + self.gs + .changed_fields + .insert(ChangedField::TransferFunction); self } @@ -945,7 +958,9 @@ impl ExtendedGraphicsStateBuilder { transfer_extra_function: Option, ) -> Self { self.gs.transfer_extra_function = transfer_extra_function; - self.gs.changed_fields.insert(TRANSFER_FUNCTION_EXTRA); + self.gs + .changed_fields + .insert(ChangedField::TransferFunctionExtra); self } @@ -953,7 +968,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_halftone(mut self, halftone_type: Option) -> Self { self.gs.halftone_dictionary = halftone_type; - self.gs.changed_fields.insert(HALFTONE_DICTIONARY); + self.gs + .changed_fields + .insert(ChangedField::HalftoneDictionary); self } @@ -961,7 +978,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_flatness_tolerance(mut self, flatness_tolerance: f32) -> Self { self.gs.flatness_tolerance = flatness_tolerance; - self.gs.changed_fields.insert(FLATNESS_TOLERANCE); + self.gs + .changed_fields + .insert(ChangedField::FlatnessTolerance); self } @@ -969,7 +988,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_smoothness_tolerance(mut self, smoothness_tolerance: f32) -> Self { self.gs.smoothness_tolerance = smoothness_tolerance; - self.gs.changed_fields.insert(SMOOTHNESS_TOLERANCE); + self.gs + .changed_fields + .insert(ChangedField::SmoothnessTolerance); self } @@ -977,7 +998,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_stroke_adjustment(mut self, stroke_adjustment: bool) -> 
Self { self.gs.stroke_adjustment = stroke_adjustment; - self.gs.changed_fields.insert(STROKE_ADJUSTMENT); + self.gs + .changed_fields + .insert(ChangedField::StrokeAdjustment); self } @@ -985,7 +1008,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_blend_mode(mut self, blend_mode: BlendMode) -> Self { self.gs.blend_mode = blend_mode; - self.gs.changed_fields.insert(BLEND_MODE); + self.gs.changed_fields.insert(ChangedField::BlendMode); self } @@ -993,7 +1016,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_soft_mask(mut self, soft_mask: Option) -> Self { self.gs.soft_mask = soft_mask; - self.gs.changed_fields.insert(SOFT_MASK); + self.gs.changed_fields.insert(ChangedField::SoftMask); self } @@ -1001,7 +1024,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_current_stroke_alpha(mut self, current_stroke_alpha: f32) -> Self { self.gs.current_stroke_alpha = current_stroke_alpha; - self.gs.changed_fields.insert(CURRENT_STROKE_ALPHA); + self.gs + .changed_fields + .insert(ChangedField::CurrentStrokeAlpha); self } @@ -1009,7 +1034,9 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_current_fill_alpha(mut self, current_fill_alpha: f32) -> Self { self.gs.current_fill_alpha = current_fill_alpha; - self.gs.changed_fields.insert(CURRENT_FILL_ALPHA); + self.gs + .changed_fields + .insert(ChangedField::CurrentFillAlpha); self } @@ -1017,7 +1044,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_alpha_is_shape(mut self, alpha_is_shape: bool) -> Self { self.gs.alpha_is_shape = alpha_is_shape; - self.gs.changed_fields.insert(ALPHA_IS_SHAPE); + self.gs.changed_fields.insert(ChangedField::AlphaIsShape); self } @@ -1025,7 +1052,7 @@ impl ExtendedGraphicsStateBuilder { #[inline] pub fn with_text_knockout(mut self, text_knockout: bool) -> Self { self.gs.text_knockout = text_knockout; - self.gs.changed_fields.insert(TEXT_KNOCKOUT); + self.gs.changed_fields.insert(ChangedField::TextKnockout); self } @@ -1075,7 +1102,8 @@ impl 
Default for ExtendedGraphicsState { /// in a `DeviceCMYK` color space should erase that component (`EraseUnderlying`) or /// leave it unchanged (`KeepUnderlying`) when overprinting (see Section 4.5.6, “Over- /// print Control”). Initial value: `EraseUnderlying` -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum OverprintMode { /// Erase underlying color when overprinting EraseUnderlying, /* 0, default */ @@ -1094,7 +1122,8 @@ impl OverprintMode { /// Black generation calculates the amount of black to be used when trying to /// reproduce a particular color. -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum BlackGenerationFunction { /// Regular black generation function /// @@ -1117,7 +1146,8 @@ pub enum BlackGenerationFunction { WithUnderColorRemoval, } -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum BlackGenerationExtraFunction {} /// See `BlackGenerationFunction`, too. Undercolor removal reduces the amounts @@ -1129,18 +1159,22 @@ pub enum BlackGenerationExtraFunction {} /// components. It can simply return its k operand unchanged, or it can return 0.0 /// (so that no color is removed), some fraction of the black amount, or even a /// negative amount, thereby adding to the total amount of colorant. 
-#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum UnderColorRemovalFunction { Default, } -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum UnderColorRemovalExtraFunction {} -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum TransferFunction {} -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum TransferExtraFunction {} /// In PDF 1.2, the graphics state includes a current halftone parameter, @@ -1163,7 +1197,8 @@ pub enum TransferExtraFunction {} >> */ /// Deserialized into Integer: 1, 5, 6, 10 or 16 -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", tag = "type", content = "data")] pub enum HalftoneType { /// 1: Defines a single halftone screen by a frequency, angle, and spot function Type1(f32, f32, SpotFunction), @@ -1198,7 +1233,8 @@ impl HalftoneType { /// Spot functions, Table 6.1, Page 489 in Pdf Reference v1.7 /// The code is pseudo code, returning the grey component at (x, y). 
-#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum SpotFunction { /// `1 - (pow(x, 2) + pow(y, 2))` SimpleDot, @@ -1269,7 +1305,8 @@ pub enum SpotFunction { Diamond, } -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", tag = "type", content = "data")] pub enum BlendMode { Seperable(SeperableBlendMode), NonSeperable(NonSeperableBlendMode), @@ -1359,7 +1396,8 @@ impl BlendMode { /// /// The function simply notes the formula that has to be applied to (`color_new`, `color_old`) in /// order to get the desired effect. You have to run each formula once for each color channel. -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum SeperableBlendMode { /// Selects the source color, ignoring the old color. Default mode. /// @@ -1560,7 +1598,8 @@ pub enum SeperableBlendMode { /// /// For the K component, the result is the K component of Cb for the Hue, Saturation, and /// Color blend modes; it is the K component of Cs for the Luminosity blend mode. -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum NonSeperableBlendMode { Hue, Saturation, @@ -1575,7 +1614,8 @@ pub enum NonSeperableBlendMode { /// made among various properties of a color specification when rendering colors for /// a given device. Specifying a rendering intent (PDF 1.1) allows a PDF file to set priorities /// regarding which of these properties to preserve and which to sacrifice. 
-#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum RenderingIntent { /// Colors are represented solely with respect to the light source; no /// correction is made for the output medium’s white point (such as @@ -1633,7 +1673,8 @@ impl RenderingIntent { /// Can also be used for Vignettes, etc. /// Beware of color spaces! /// __See PDF Reference Page 545__ - Soft masks -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct SoftMask { /// The data to be used as a soft mask data: Vec, @@ -1641,7 +1682,8 @@ pub struct SoftMask { bits_per_component: u8, } -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum SoftMaskFunction { // (Color, Shape, Alpha) = Composite(Color0, Alpha0, Group) /// In this function, the old (backdrop) color does not contribute to the result.
diff --git a/src/html.rs b/src/html.rs index 3b49349..e592509 100644 --- a/src/html.rs +++ b/src/html.rs @@ -308,11 +308,7 @@ fn fixup_xml(s: &str, doc: &mut PdfDocument, config: &XmlRenderOptions) -> Strin None => { let raw_image = match crate::image::RawImage::decode_from_bytes(&image_bytes) { Ok(o) => o, - Err(e) => { - #[cfg(not(target_family = "wasm"))] - { - println!("{e}"); - } + Err(_) => { continue; } }; diff --git a/src/image.rs b/src/image.rs index 0451412..0d421d4 100644 --- a/src/image.rs +++ b/src/image.rs @@ -1,12 +1,14 @@ use core::fmt; use std::io::Cursor; +use base64::Engine; use image::{DynamicImage, GenericImageView}; +use serde::de::Error; use serde_derive::{Deserialize, Serialize}; use crate::{ColorBits, ColorSpace}; -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, PartialOrd)] +#[derive(Debug, Clone, PartialEq, PartialOrd)] pub struct RawImage { pub pixels: RawImageData, pub width: usize, @@ -15,6 +17,48 @@ pub struct RawImage { pub tag: Vec, } +impl serde::Serialize for RawImage { + fn serialize(&self, serializer: S) -> Result { + // Cycle through all output image formats until one succeeds. + let output_formats = [ + OutputImageFormat::Png, + OutputImageFormat::Jpeg, + OutputImageFormat::Gif, + OutputImageFormat::WebP, + OutputImageFormat::Pnm, + OutputImageFormat::Tiff, + OutputImageFormat::Tga, + OutputImageFormat::Bmp, + OutputImageFormat::Avif, + ]; + let (bytes, fmt) = self + .encode_to_bytes(&output_formats) + .map_err(serde::ser::Error::custom)?; + let base64_str = base64::prelude::BASE64_STANDARD.encode(&bytes); + let data_url = format!("data:{};base64,{}", fmt.mime_type(), base64_str); + serializer.serialize_str(&data_url) + } +} + +impl<'de> serde::Deserialize<'de> for RawImage { + fn deserialize>(deserializer: D) -> Result { + let s = String::deserialize(deserializer)?; + // If the string is a data URL (e.g. "data:image/png;base64,..."), + // strip the header and keep the base64 payload. 
+ let base64_part = if s.starts_with("data:") { + s.find(',') + .map(|idx| &s[idx + 1..]) + .ok_or_else(|| D::Error::custom("Invalid data URL: missing comma"))? + } else { + &s + }; + let bytes = base64::prelude::BASE64_STANDARD + .decode(base64_part) + .map_err(serde::de::Error::custom)?; + Self::decode_from_bytes(&bytes).map_err(serde::de::Error::custom) + } +} + struct RawImageU8 { pub pixels: Vec, pub width: usize, @@ -154,7 +198,8 @@ impl RawImageData { } /// Format to encode the image into -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum OutputImageFormat { /// An Image in PNG Format Png, @@ -388,9 +433,10 @@ impl RawImage { /// NOTE: depends on the enabled image formats! /// - /// Function will try to encode the image to the given formats and return an Error on exhaustion. - /// Tries to encode the image into one of the given target formats, returning the encoded - /// bytes if successful. For simplicity this implementation supports only 8‑bit image data. + /// Function will try to encode the image to the given formats and return an Error on + /// exhaustion. Tries to encode the image into one of the given target formats, returning + /// the encoded bytes if successful. For simplicity this implementation supports only 8‑bit + /// image data. 
pub fn encode_to_bytes( &self, target_fmt: &[OutputImageFormat], diff --git a/src/lib.rs b/src/lib.rs index 9331c27..881ad0a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -2,6 +2,8 @@ use std::collections::BTreeMap; +use serde_derive::{Deserialize, Serialize}; + // #[cfg(target_family = "wasm")] /// Link / bookmark annotation handling pub mod annotation; @@ -59,7 +61,8 @@ pub(crate) mod render; pub use render::PdfToSvgOptions; /// Internal ID for page annotations -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct PageAnnotId(pub String); impl PageAnnotId { @@ -69,7 +72,8 @@ impl PageAnnotId { } /// Internal ID for XObjects -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct XObjectId(pub String); impl XObjectId { @@ -79,7 +83,8 @@ impl XObjectId { } /// Internal ID for Fonts -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct FontId(pub String); impl FontId { @@ -89,7 +94,8 @@ impl FontId { } /// Internal ID for Layers -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct LayerInternalId(pub String); impl LayerInternalId { @@ -99,7 +105,8 @@ impl LayerInternalId { } /// Internal ID for extended graphic states -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct ExtendedGraphicsStateId(pub String); impl ExtendedGraphicsStateId { @@ -109,7 +116,8 @@ impl ExtendedGraphicsStateId { } /// Internal ID for ICC profiles -#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord)] 
+#[derive(Debug, PartialEq, Clone, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(transparent)] pub struct IccProfileId(pub String); impl IccProfileId { @@ -119,7 +127,7 @@ impl IccProfileId { } /// Parsed PDF document -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct PdfDocument { /// Metadata about the document (author, info, XMP metadata, etc.) pub metadata: PdfMetadata, @@ -194,10 +202,13 @@ impl PdfDocument { /// Adds a new page-level bookmark on page `$page`, returning the bookmarks internal ID pub fn add_bookmark(&mut self, name: &str, page: usize) -> PageAnnotId { let id = PageAnnotId::new(); - self.bookmarks.map.insert(id.clone(), PageAnnotation { - name: name.to_string(), - page, - }); + self.bookmarks.map.insert( + id.clone(), + PageAnnotation { + name: name.to_string(), + page, + }, + ); id } @@ -233,7 +244,7 @@ impl PdfDocument { } } -#[derive(Debug, Default, PartialEq, Clone)] +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] pub struct PdfResources { /// Fonts found in the PDF file, indexed by the sha256 of their contents pub fonts: PdfFontMap, @@ -245,12 +256,12 @@ pub struct PdfResources { pub layers: PdfLayerMap, } -#[derive(Debug, PartialEq, Default, Clone)] +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct PdfLayerMap { pub map: BTreeMap, } -#[derive(Debug, PartialEq, Default, Clone)] +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct PdfFontMap { pub map: BTreeMap, } @@ -258,28 +269,29 @@ pub struct PdfFontMap { #[derive(Debug, PartialEq, Default, Clone)] pub struct ParsedIccProfile {} -#[derive(Debug, PartialEq, Default, Clone)] +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct XObjectMap { pub map: BTreeMap, } -#[derive(Debug, PartialEq, Default, Clone)] +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct PageAnnotMap { pub map: BTreeMap, } 
-#[derive(Debug, PartialEq, Default, Clone)] +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct ExtendedGraphicsStateMap { pub map: BTreeMap, } /// This is a wrapper in order to keep shared data between the documents XMP metadata and /// the "Info" dictionary in sync -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct PdfMetadata { /// Document information pub info: PdfDocumentInfo, /// XMP Metadata. Is ignored on save if the PDF conformance does not allow XMP + #[serde(skip_serializing_if = "Option::is_none")] pub xmp: Option, } @@ -328,13 +340,13 @@ impl PdfMetadata { /// Initial struct for Xmp metatdata. This should be expanded later for XML handling, etc. /// Right now it just fills out the necessary fields -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct XmpMetadata { /// Web-viewable or "default" or to be left empty. Usually "default". pub rendition_class: Option, } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct PdfDocumentInfo { /// Is the document trapped? pub trapped: bool, diff --git a/src/matrix.rs b/src/matrix.rs index cc1c459..f9d4a9c 100644 --- a/src/matrix.rs +++ b/src/matrix.rs @@ -1,11 +1,14 @@ //! Current transformation matrix, for transforming shapes (rotate, translate, scale) +use serde_derive::{Deserialize, Serialize}; + use crate::units::Pt; /// PDF "current transformation matrix". Once set, will operate on all following shapes, /// until the `layer.restore_graphics_state()` is called. It is important to /// call `layer.save_graphics_state()` earlier. 
-#[derive(Debug, Copy, Clone, PartialEq)] +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename = "kebab-case", tag = "type", content = "data")] pub enum CurTransMat { /// Translation matrix (in points from bottom left corner) /// X and Y can have different values @@ -231,7 +234,8 @@ fn mul_add(a: f32, b: f32, c: f32) -> f32 { /// Note: `TextScale` does not exist. Use `layer.set_word_spacing()` /// and `layer.set_character_spacing()` to specify the scaling between words /// and characters. -#[derive(Debug, Copy, PartialEq, Clone)] +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum TextMatrix { /// Text rotation matrix, used for rotating text Rotate(f32), diff --git a/src/ops.rs b/src/ops.rs index 7df2470..48e0dc4 100644 --- a/src/ops.rs +++ b/src/ops.rs @@ -1,8 +1,8 @@ -use lopdf::Object as LoObject; +use serde_derive::{Deserialize, Serialize}; use crate::{ - BuiltinFont, ExtendedGraphicsStateId, FontId, LayerInternalId, LinkAnnotation, PdfResources, - PdfToSvgOptions, XObjectId, XObjectTransform, + BuiltinFont, DictItem, ExtendedGraphicsStateId, FontId, LayerInternalId, LinkAnnotation, + PdfResources, PdfToSvgOptions, XObjectId, XObjectTransform, color::Color, graphics::{ Line, LineCapStyle, LineDashPattern, LineJoinStyle, Point, Polygon, Rect, TextRenderingMode, @@ -11,7 +11,7 @@ use crate::{ units::{Mm, Pt}, }; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct PdfPage { pub media_box: Rect, pub trim_box: Rect, @@ -44,9 +44,42 @@ impl PdfPage { pub fn to_svg(&self, resources: &PdfResources, opts: &PdfToSvgOptions) -> String { crate::render::render_to_svg(self, resources, opts) } + + pub fn get_xobject_ids(&self) -> Vec { + self.ops + .iter() + .filter_map(|s| match s { + Op::UseXObject { id, .. 
} => Some(id.clone()), + _ => None, + }) + .collect() + } + + pub fn get_external_font_ids(&self) -> Vec { + self.ops + .iter() + .filter_map(|s| match s { + Op::WriteText { font, .. } => Some(font.clone()), + Op::WriteCodepoints { font, .. } => Some(font.clone()), + Op::WriteCodepointsWithKerning { font, .. } => Some(font.clone()), + _ => None, + }) + .collect() + } + + pub fn get_layers(&self) -> Vec { + self.ops + .iter() + .filter_map(|s| match s { + Op::BeginLayer { layer_id } | Op::EndLayer { layer_id } => Some(layer_id.clone()), + _ => None, + }) + .collect() + } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename = "kebab-case")] pub enum LayerIntent { View, Design, @@ -61,7 +94,8 @@ impl LayerIntent { } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename = "kebab-case")] pub enum LayerSubtype { Artwork, } @@ -74,7 +108,8 @@ impl LayerSubtype { } } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename = "kebab-case")] pub struct Layer { pub name: String, pub creator: String, @@ -94,7 +129,8 @@ impl Layer { } /// Operations that can occur in a PDF page -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(tag = "cmd", content = "args")] pub enum Op { /// Debugging or section marker (arbitrary id can mark a certain point in a stream of /// operations) @@ -196,5 +232,5 @@ pub enum Op { transform: XObjectTransform, }, /// Unknown, custom key / value operation - Unknown { key: String, value: Vec }, + Unknown { key: String, value: Vec }, } diff --git a/src/render.rs b/src/render.rs index 42fef67..cb2008e 100644 --- a/src/render.rs +++ b/src/render.rs @@ -1,23 +1,26 @@ -use crate::ops::PdfPage; -use crate::serialize::prepare_fonts; -use crate::{OutputImageFormat, PdfResources}; +use base64::Engine; +use serde_derive::{Deserialize, 
Serialize}; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +use crate::{OutputImageFormat, PdfResources, ops::PdfPage, serialize::prepare_fonts}; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct PdfToSvgOptions { /// When rendering ImageXObjects, the images are embedded in the SVG. /// You can specify here, which image formats you'd like to output, i.e. /// `[Jpeg, Png, Avif]` will first try to encode the image to /// `image/jpeg,base64=...`, if the encoding fails, it will try - /// `Png`, and last `Avif`. This is because if you want to render the SVG later on - /// using `svg2png`, not all image formats might be supported, but generally in a - /// browser context you can use `WebP` and `Avif` to save space. - pub output_image_formats: Vec, + /// `Png`, and last `Avif`. + /// + /// If you want to render the SVG later using `svg2png`, not all image + /// formats might be supported, but generally in a + /// browser context you can prefer `WebP` and `Avif` to save space. 
+ pub image_formats: Vec, } impl Default for PdfToSvgOptions { fn default() -> Self { Self { - output_image_formats: vec![ + image_formats: vec![ OutputImageFormat::Png, OutputImageFormat::Jpeg, OutputImageFormat::Bmp, @@ -29,7 +32,7 @@ impl Default for PdfToSvgOptions { impl PdfToSvgOptions { pub fn web() -> Self { Self { - output_image_formats: vec![ + image_formats: vec![ OutputImageFormat::Avif, OutputImageFormat::WebP, OutputImageFormat::Jpeg, @@ -62,7 +65,7 @@ pub fn render_to_svg(page: &PdfPage, resources: &PdfResources, opts: &PdfToSvgOp for (font_id, font) in prepare_fonts(resources, &[page.clone()]).iter() { svg.push_str(&format!( r#"@font-face {{ font-family: "{}"; src: url("data:font/otf;charset=utf-8;base64,{}"); }}"#, - font_id.0, base64::encode(&font.subset_font.bytes), + font_id.0, base64::prelude::BASE64_STANDARD.encode(&font.subset_font.bytes), )); } svg.push_str("\n"); @@ -225,7 +228,7 @@ pub fn render_to_svg(page: &PdfPage, resources: &PdfResources, opts: &PdfToSvgOp let points: Vec = line .points .iter() - .map(|(pt, _)| format!("{},{}", pt.x.0, pt.y.0)) + .map(|pt| format!("{},{}", pt.p.x.0, pt.p.y.0)) .collect(); let points_str = points.join(" "); if line.is_closed { @@ -258,10 +261,10 @@ pub fn render_to_svg(page: &PdfPage, resources: &PdfResources, opts: &PdfToSvgOp // Draw a polygon. crate::ops::Op::DrawPolygon { polygon } => { for ring in &polygon.rings { - if let Some((first_pt, _)) = ring.first() { - let mut d = format!("M {} {}", first_pt.x.0, first_pt.y.0); - for (pt, _) in &ring[1..] { - d.push_str(&format!(" L {} {}", pt.x.0, pt.y.0)); + if let Some(first_pt) = ring.points.first() { + let mut d = format!("M {} {}", first_pt.p.x.0, first_pt.p.y.0); + for pt in &ring.points[1..] 
{ + d.push_str(&format!(" L {} {}", pt.p.x.0, pt.p.y.0)); } if polygon.mode == crate::graphics::PaintMode::Fill || polygon.mode == crate::graphics::PaintMode::FillStroke @@ -294,10 +297,11 @@ pub fn render_to_svg(page: &PdfPage, resources: &PdfResources, opts: &PdfToSvgOp if let crate::xobject::XObject::Image(raw_image) = xobj { let img_width = raw_image.width; let img_height = raw_image.height; - match raw_image.encode_to_bytes(&opts.output_image_formats) { + match raw_image.encode_to_bytes(&opts.image_formats) { Ok((encoded_bytes, fmt)) => { let mime = fmt.mime_type(); - let image_data = base64::encode(&encoded_bytes); + let image_data = + base64::prelude::BASE64_STANDARD.encode(&encoded_bytes); svg.push_str(&format!( r#""#, current_x, diff --git a/src/serialize.rs b/src/serialize.rs index 43b60ed..487f62e 100644 --- a/src/serialize.rs +++ b/src/serialize.rs @@ -384,10 +384,10 @@ pub(crate) fn translate_operations( } Op::BeginLayer { layer_id } => { content.push(LoOp::new("q", vec![])); - content.push(LoOp::new("BDC", vec![ - Name("OC".into()), - Name(layer_id.0.clone().into()), - ])); + content.push(LoOp::new( + "BDC", + vec![Name("OC".into()), Name(layer_id.0.clone().into())], + )); } Op::EndLayer { layer_id } => { content.push(LoOp::new("EMC", vec![])); @@ -410,10 +410,10 @@ pub(crate) fn translate_operations( } Op::WriteText { text, font, size } => { if let Some(prepared_font) = fonts.get(font) { - content.push(LoOp::new("Tf", vec![ - font.0.clone().into(), - (size.0).into(), - ])); + content.push(LoOp::new( + "Tf", + vec![font.0.clone().into(), (size.0).into()], + )); let glyph_ids = text .chars() @@ -429,10 +429,10 @@ pub(crate) fn translate_operations( } } Op::WriteTextBuiltinFont { text, font, size } => { - content.push(LoOp::new("Tf", vec![ - font.get_pdf_id().into(), - (size.0).into(), - ])); + content.push(LoOp::new( + "Tf", + vec![font.get_pdf_id().into(), (size.0).into()], + )); let bytes = lopdf::Document::encode_text( 
&lopdf::Encoding::SimpleEncoding(b"WinAnsiEncoding"), text, @@ -441,10 +441,10 @@ pub(crate) fn translate_operations( } Op::WriteCodepoints { font, cp, size } => { if let Some(prepared_font) = fonts.get(font) { - content.push(LoOp::new("Tf", vec![ - font.0.clone().into(), - (size.0).into(), - ])); + content.push(LoOp::new( + "Tf", + vec![font.0.clone().into(), (size.0).into()], + )); let subset_codepoints = cp .iter() @@ -503,10 +503,10 @@ pub(crate) fn translate_operations( content.push(LoOp::new("Tw", vec![Real(*percent)])); } Op::SetFontSize { size, font } => { - content.push(LoOp::new("Tf", vec![ - font.0.clone().into(), - (size.0).into(), - ])); + content.push(LoOp::new( + "Tf", + vec![font.0.clone().into(), (size.0).into()], + )); } Op::SetTextCursor { pos } => { content.push(LoOp::new("Td", vec![pos.x.0.into(), pos.y.0.into()])); @@ -520,12 +520,10 @@ pub(crate) fn translate_operations( content.push(LoOp::new("sc", vec![Real(*r), Real(*g), Real(*b)])); } Color::Cmyk(crate::Cmyk { c, m, y, k, .. 
}) => { - content.push(LoOp::new("sc", vec![ - Real(*c), - Real(*m), - Real(*y), - Real(*k), - ])); + content.push(LoOp::new( + "sc", + vec![Real(*c), Real(*m), Real(*y), Real(*k)], + )); } Color::SpotColor(_) => { // handle or unknown @@ -546,10 +544,10 @@ pub(crate) fn translate_operations( } Op::SetLineDashPattern { dash } => { let dash_array_ints = dash.as_array().into_iter().map(Integer).collect(); - content.push(LoOp::new("d", vec![ - Array(dash_array_ints), - Integer(dash.offset), - ])); + content.push(LoOp::new( + "d", + vec![Array(dash_array_ints), Integer(dash.offset)], + )); } Op::SetLineJoinStyle { join } => { content.push(LoOp::new("j", vec![Integer(join.id())])); @@ -608,7 +606,10 @@ pub(crate) fn translate_operations( content.push(LoOp::new("Q", vec![])); } Op::Unknown { key, value } => { - content.push(LoOp::new(key.as_str(), value.clone())); + content.push(LoOp::new( + key.as_str(), + value.iter().map(|s| s.to_lopdf()).collect(), + )); } } } @@ -662,10 +663,10 @@ fn line_to_stream_ops(line: &Line) -> Vec { return operations; }; - operations.push(LoOp::new(OP_PATH_CONST_MOVE_TO, vec![ - line.points[0].0.x.into(), - line.points[0].0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_MOVE_TO, + vec![line.points[0].p.x.into(), line.points[0].p.y.into()], + )); // Skip first element let mut current = 1; @@ -677,7 +678,7 @@ fn line_to_stream_ops(line: &Line) -> Vec { let p1 = &line.points[current - 1]; // prev pt let p2 = &line.points[current]; // current pt - if p1.1 && p2.1 { + if p1.bezier && p2.bezier { // current point is a bezier handle // valid bezier curve must have two sequential bezier handles // we also can"t build a valid cubic bezier curve if the cuve contains less than @@ -685,32 +686,31 @@ fn line_to_stream_ops(line: &Line) -> Vec { // matter if let Some(p3) = line.points.get(current + 1) { if let Some(p4) = line.points.get(current + 2) { - if p1.0 == p2.0 { + if p1.p == p2.p { // first control point coincides with initial point 
of curve - operations.push(LoOp::new(OP_PATH_CONST_3BEZIER_V1, vec![ - p3.0.x.into(), - p3.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); - } else if p2.0 == p3.0 { + operations.push(LoOp::new( + OP_PATH_CONST_3BEZIER_V1, + vec![p3.p.x.into(), p3.p.y.into(), p4.p.x.into(), p4.p.y.into()], + )); + } else if p2.p == p3.p { // first control point coincides with final point of curve - operations.push(LoOp::new(OP_PATH_CONST_3BEZIER_V2, vec![ - p2.0.x.into(), - p2.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_3BEZIER_V2, + vec![p2.p.x.into(), p2.p.y.into(), p4.p.x.into(), p4.p.y.into()], + )); } else { // regular bezier curve with four points - operations.push(LoOp::new(OP_PATH_CONST_4BEZIER, vec![ - p2.0.x.into(), - p2.0.y.into(), - p3.0.x.into(), - p3.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_4BEZIER, + vec![ + p2.p.x.into(), + p2.p.y.into(), + p3.p.x.into(), + p3.p.y.into(), + p4.p.x.into(), + p4.p.y.into(), + ], + )); } current += 3; continue; @@ -719,10 +719,10 @@ fn line_to_stream_ops(line: &Line) -> Vec { } // normal straight line - operations.push(LoOp::new(OP_PATH_CONST_LINE_TO, vec![ - p2.0.x.into(), - p2.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_LINE_TO, + vec![p2.p.x.into(), p2.p.y.into()], + )); current += 1; } @@ -759,55 +759,54 @@ fn polygon_to_stream_ops(poly: &Polygon) -> Vec { }; for ring in poly.rings.iter() { - operations.push(LoOp::new(OP_PATH_CONST_MOVE_TO, vec![ - ring[0].0.x.into(), - ring[0].0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_MOVE_TO, + vec![ring.points[0].p.x.into(), ring.points[0].p.y.into()], + )); // Skip first element let mut current = 1; - let max_len = ring.len(); + let max_len = ring.points.len(); // Loop over every points, determine if v, y, c or l operation should be used and build // curve / line accordingly while current < max_len { - let p1 = &ring[current - 1]; // prev 
pt - let p2 = &ring[current]; // current pt + let p1 = &ring.points[current - 1]; // prev pt + let p2 = &ring.points[current]; // current pt - if p1.1 && p2.1 { + if p1.bezier && p2.bezier { // current point is a bezier handle // valid bezier curve must have two sequential bezier handles // we also can"t build a valid cubic bezier curve if the cuve contains less than // four points. If p3 or p4 is marked as "next point is bezier handle" or not, // doesn"t matter - if let Some(p3) = ring.get(current + 1) { - if let Some(p4) = ring.get(current + 2) { - if p1.0 == p2.0 { + if let Some(p3) = ring.points.get(current + 1) { + if let Some(p4) = ring.points.get(current + 2) { + if p1.p == p2.p { // first control point coincides with initial point of curve - operations.push(LoOp::new(OP_PATH_CONST_3BEZIER_V1, vec![ - p3.0.x.into(), - p3.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); - } else if p2.0 == p3.0 { + operations.push(LoOp::new( + OP_PATH_CONST_3BEZIER_V1, + vec![p3.p.x.into(), p3.p.y.into(), p4.p.x.into(), p4.p.y.into()], + )); + } else if p2.p == p3.p { // first control point coincides with final point of curve - operations.push(LoOp::new(OP_PATH_CONST_3BEZIER_V2, vec![ - p2.0.x.into(), - p2.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_3BEZIER_V2, + vec![p2.p.x.into(), p2.p.y.into(), p4.p.x.into(), p4.p.y.into()], + )); } else { // regular bezier curve with four points - operations.push(LoOp::new(OP_PATH_CONST_4BEZIER, vec![ - p2.0.x.into(), - p2.0.y.into(), - p3.0.x.into(), - p3.0.y.into(), - p4.0.x.into(), - p4.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_4BEZIER, + vec![ + p2.p.x.into(), + p2.p.y.into(), + p3.p.x.into(), + p3.p.y.into(), + p4.p.x.into(), + p4.p.y.into(), + ], + )); } current += 3; continue; @@ -816,10 +815,10 @@ fn polygon_to_stream_ops(poly: &Polygon) -> Vec { } // normal straight line - operations.push(LoOp::new(OP_PATH_CONST_LINE_TO, vec![ - p2.0.x.into(), - 
p2.0.y.into(), - ])); + operations.push(LoOp::new( + OP_PATH_CONST_LINE_TO, + vec![p2.p.x.into(), p2.p.y.into()], + )); current += 1; } } @@ -879,17 +878,20 @@ pub(crate) fn prepare_fonts( let glyph_ids = font.get_used_glyph_ids(font_id, pages); let cid_to_unicode = font.generate_cid_to_unicode_map(font_id, &glyph_ids); let widths = font.get_normalized_widths(&glyph_ids); - fonts_in_pdf.insert(font_id.clone(), PreparedFont { - original: font.clone(), - subset_font, - cid_to_unicode_map: cid_to_unicode, - vertical_writing: false, // !font.vmtx_data.is_empty(), - ascent: font.font_metrics.ascender as i64, - descent: font.font_metrics.descender as i64, - widths_list: widths, - max_height: font.get_max_height(&glyph_ids), - total_width: font.get_total_width(&glyph_ids), - }); + fonts_in_pdf.insert( + font_id.clone(), + PreparedFont { + original: font.clone(), + subset_font, + cid_to_unicode_map: cid_to_unicode, + vertical_writing: false, // !font.vmtx_data.is_empty(), + ascent: font.font_metrics.ascender as i64, + descent: font.font_metrics.descender as i64, + widths_list: widths, + max_height: font.get_max_height(&glyph_ids), + total_width: font.get_total_width(&glyph_ids), + }, + ); } fonts_in_pdf diff --git a/src/svg.rs b/src/svg.rs index 9e60666..323b374 100644 --- a/src/svg.rs +++ b/src/svg.rs @@ -1,9 +1,8 @@ use std::collections::BTreeMap; -use lopdf::Object; use svg2pdf::{ConversionOptions, PageOptions, usvg}; -use crate::{ColorSpace, PdfResources, xobject::ExternalXObject}; +use crate::{ColorSpace, DictItem, ExternalStream, PdfResources, xobject::ExternalXObject}; /// SVG - wrapper around an `XObject` to allow for more /// control within the library. 
@@ -39,11 +38,13 @@ impl Svg { let pdf_bytes = svg2pdf::to_pdf(&tree, co, po) .map_err(|err| format!("convert svg tree to pdf: {err}"))?; - let (pdf, _) = - crate::deserialize::parse_pdf_from_bytes(&pdf_bytes, &crate::PdfParseOptions { + let (pdf, _) = crate::deserialize::parse_pdf_from_bytes( + &pdf_bytes, + &crate::PdfParseOptions { fail_on_error: false, - }) - .map_err(|err| format!("convert svg tree to pdf: parse pdf: {err}"))?; + }, + ) + .map_err(|err| format!("convert svg tree to pdf: parse pdf: {err}"))?; let page = pdf .pages @@ -61,27 +62,29 @@ impl Svg { let px_width = width_pt.into_px(dpi); let px_height = height_pt.into_px(dpi); - let dict = lopdf::Dictionary::from_iter(vec![ - ("Type", Object::Name("XObject".into())), - ("Subtype", Object::Name("Form".into())), - ("Width", Object::Integer(px_width.0 as i64)), - ( - "ColorSpace", - Object::Name(ColorSpace::Rgb.as_string().into()), - ), + let rgb = ColorSpace::Rgb.as_string(); + let dict = [ + ("Type", DictItem::Name("XObject".into())), + ("Subtype", DictItem::Name("Form".into())), + ("Width", DictItem::Int(px_width.0 as i64)), + ("ColorSpace", DictItem::Name(rgb.into())), ( "BBox", - Object::Array(vec![ - Object::Integer(0), - Object::Integer(0), - Object::Integer(px_width.0 as i64), - Object::Integer(px_height.0 as i64), + DictItem::Array(vec![ + DictItem::Int(0), + DictItem::Int(0), + DictItem::Int(px_width.0 as i64), + DictItem::Int(px_height.0 as i64), ]), ), - ]); + ]; Ok(ExternalXObject { - stream: lopdf::Stream::new(dict, stream), + stream: ExternalStream { + dict: dict.into_iter().map(|(k, v)| (k.to_string(), v)).collect(), + content: stream, + compress: false, + }, width: Some(px_width), height: Some(px_height), dpi: Some(dpi), diff --git a/src/units.rs b/src/units.rs index 502c676..8491a1f 100644 --- a/src/units.rs +++ b/src/units.rs @@ -2,6 +2,8 @@ use std::{cmp::Ordering, num::FpCategory}; +use serde_derive::{Deserialize, Serialize}; + macro_rules! 
impl_partialeq { ($t:ty) => { impl PartialEq for $t { @@ -40,7 +42,7 @@ macro_rules! impl_ord { } /// Scale in millimeter -#[derive(Debug, Default, Copy, Clone, PartialOrd)] +#[derive(Debug, Default, Copy, Clone, PartialOrd, Serialize, Deserialize)] pub struct Mm(pub f32); impl Mm { @@ -62,7 +64,7 @@ impl_partialeq!(Mm); impl_ord!(Mm); /// Scale in point -#[derive(Debug, Default, Copy, Clone, PartialOrd)] +#[derive(Debug, Default, Copy, Clone, PartialOrd, Serialize, Deserialize)] pub struct Pt(pub f32); impl Pt { @@ -103,7 +105,7 @@ impl_partialeq!(Pt); impl_ord!(Pt); /// Scale in pixels -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] +#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] pub struct Px(pub usize); impl Px { diff --git a/src/utils.rs b/src/utils.rs index 501bf42..9f66172 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -69,13 +69,14 @@ pub(crate) fn to_pdf_time_stamp_metadata(date: &OffsetDateTime) -> String { date.second(), ) } -#[cfg(target_family = "wasm")] -pub(crate) fn to_pdf_xmp_date(date: &OffsetDateTime) -> String { + +#[cfg(target_arch = "wasm")] +pub(crate) fn to_pdf_xmp_date(_date: &OffsetDateTime) -> String { "D:1970-01-01T00:00:00+00'00'".to_string() } // D:2018-09-19T10:05:05+00'00' -#[cfg(not(target_family = "wasm"))] +#[cfg(not(target_arch = "wasm"))] pub(crate) fn to_pdf_xmp_date(date: &OffsetDateTime) -> String { // Since the time is in UTC, we know that the time zone // difference to UTC is 0 min, 0 sec, hence the 00'00 diff --git a/src/wasm.rs b/src/wasm.rs index 93e2154..5592c48 100644 --- a/src/wasm.rs +++ b/src/wasm.rs @@ -1,36 +1,59 @@ use std::collections::BTreeMap; -use base64::Engine; +use base64::prelude::*; use serde_derive::{Deserialize, Serialize}; -use crate::{XmlRenderOptions, serialize::PdfSaveOptions}; +use crate::{ + FontId, LayerInternalId, PdfDocument, PdfPage, PdfParseOptions, PdfResources, PdfSaveOptions, + PdfToSvgOptions, PdfWarnMsg, XObjectId, 
XmlRenderOptions, units::Mm, +}; pub type Base64String = String; -#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] -pub struct PrintPdfApiInput { +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Default)] +pub struct PrintPdfHtmlInput { + /// Title of the PDF document + #[serde(default, skip_serializing_if = "String::is_empty")] + pub title: String, + /// Input HTML #[serde(default, skip_serializing_if = "String::is_empty")] pub html: String, + /// Input images (i.e. "dog.png" => Base64String) #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub images: BTreeMap, + /// Input fonts (i.e. "Roboto.ttf" => Base64String) #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub fonts: BTreeMap, + /// Miscellaneous options #[serde(default, skip_serializing_if = "PdfGenerationOptions::is_default")] pub options: PdfGenerationOptions, } -#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Clone)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub struct PdfGenerationOptions { + /// Whether to compress images and if yes, to what quality level #[serde(default, skip_serializing_if = "Option::is_none")] - pub strict: Option, + pub image_compression: Option, + /// Whether to embed fonts in the PDF (default: true) #[serde(default, skip_serializing_if = "Option::is_none")] - pub dont_compress_images: Option, + pub font_embedding: Option, + /// Page width in mm, default 210.0 #[serde(default, skip_serializing_if = "Option::is_none")] - pub embed_entire_fonts: Option, + pub page_width: Option, + /// Page height in mm, default 297.0 #[serde(default, skip_serializing_if = "Option::is_none")] - pub page_width_mm: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub page_height_mm: Option, + pub page_height: Option, +} + +impl Default for PdfGenerationOptions { + fn default() -> Self { + Self { + image_compression: None, + font_embedding: Some(true), + page_width: Some(210.0), + page_height: Some(297.0), 
+ } + } } impl PdfGenerationOptions { @@ -39,45 +62,81 @@ impl PdfGenerationOptions { } } -#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] +#[derive(Serialize, Debug, PartialEq, Clone)] #[serde(rename_all = "lowercase")] -pub struct PrintPdfApiReturn { +pub struct PrintPdfApiReturn { + /// If "status" is 0, then data contains the processed data + /// If non-zero, data is the error string. pub status: usize, - #[serde(skip_serializing_if = "String::is_empty")] - pub pdf: String, - #[serde(skip_serializing_if = "String::is_empty")] - pub error: String, + /// Data or error of the function called + pub data: StatusOrData, } -#[cfg(feature = "wasm")] +/// Data or error of the output of the function. +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] +#[serde(untagged)] +enum StatusOrData { + Ok(T), + Error(String), +} + +/// Parses the input HTML, converts it to PDF pages and outputs the generated +/// PDF as a JSON object +/// +/// ```js,no_run,ignore +/// let html = "

Hello!

"; +/// let input = JSON.encode({ html: html, title "My PDF!" }); +/// let document = JSON.parse(Pdf_HtmlToPdfDocument(input)); +/// console.log(document); +/// // { +/// // status: 0, +/// // data: { +/// // metadata: ..., +/// // resources: ..., +/// // bookmarks: ..., +/// // pages: [{ media_box, trim_box, crop_box, ops }] +/// // } +/// // } +/// ``` #[allow(non_snake_case)] -#[wasm_bindgen::prelude::wasm_bindgen] -pub fn PrintPdfFromXml(input: String) -> String { - let init = match serde_json::from_str::(&input) { - Ok(o) => match printpdf_from_xml_internal(o) { - Ok(o) => o, - Err(e) => e, - }, - Err(e) => PrintPdfApiReturn { - pdf: String::new(), - status: 1, - error: format!("failed to parse input parameters: {e}"), - }, +#[cfg_attr(feature = "wasm", wasm_bindgen::prelude::wasm_bindgen)] +pub fn Pdf_HtmlToPdfDocument(input: String) -> String { + let input = match serde_json::from_str::(&input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 1, + data: StatusOrData::::Error(format!( + "failed to parse input parameters: {e}" + )), + }) + .unwrap_or_default(); + } }; - serde_json::to_string(&init).unwrap_or_default() -} -fn printpdf_from_xml_internal( - input: PrintPdfApiInput, -) -> Result { - use base64::prelude::*; + let document = match pdf_html_to_json(input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 2, + data: StatusOrData::::Error(e), + }) + .unwrap_or_default(); + } + }; - use crate::units::Mm; + serde_json::to_string(&PrintPdfApiReturn { + status: 0, + data: StatusOrData::Ok(document), + }) + .unwrap_or_default() +} +fn pdf_html_to_json(input: PrintPdfHtmlInput) -> Result { // TODO: extract document title from XML! 
let opts = XmlRenderOptions { - page_width: Mm(input.options.page_width_mm.unwrap_or(210.0)), - page_height: Mm(input.options.page_height_mm.unwrap_or(297.0)), + page_width: Mm(input.options.page_width.unwrap_or(210.0)), + page_height: Mm(input.options.page_height.unwrap_or(297.0)), images: input .images .iter() @@ -95,21 +154,222 @@ fn printpdf_from_xml_internal( components: Vec::new(), }; - let mut pdf = crate::PdfDocument::new("HTML rendering demo"); + let mut pdf = crate::PdfDocument::new(&input.title); + + let pages = pdf.html2pages(&input.html, opts)?; + + pdf.with_pages(pages); + + Ok(pdf) +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfParseInput { + pub pdf_base64: String, + #[serde(default)] + pub options: PdfParseOptions, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfParseOutput { + pub pdf: PdfDocument, + #[serde(default)] + pub warnings: Vec, +} + +/// Parses the input PDF file (as a base64 encoded string), outputs the parsed +/// PDF (and any warnings) as a JSON object +/// +/// ```js,no_run,ignore +/// let input = JSON.encode({ pdf_base64: atob(my_pdf) }); +/// let doc = JSON.parse(Pdf_BytesToPdfDocument(input)); +/// console.log(doc.pdf); +/// console.log(doc.warnings); +/// // { +/// // status: 0, +/// // data: { +/// // metadata: ..., +/// // resources: ..., +/// // bookmarks: ..., +/// // pages: [{ media_box, trim_box, crop_box, ops }] +/// // } +/// // } +/// ``` +#[cfg(feature = "wasm")] +#[allow(non_snake_case)] +#[wasm_bindgen::prelude::wasm_bindgen] +pub fn Pdf_BytesToPdfDocument(input: String) -> String { + let input = match serde_json::from_str::(&input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 1, + data: StatusOrData::::Error(format!( + "failed to parse input parameters: {e}" + )), + }) + .unwrap_or_default(); + } + }; + + let bytes = match base64::prelude::BASE64_STANDARD.decode(&input.pdf_base64) { + Ok(o) => o, 
+ Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 2, + data: StatusOrData::::Error(format!( + "failed to parse decode input.pdf_base64 as base64: {e}" + )), + }) + .unwrap_or_default(); + } + }; + + let (doc, warn) = match PdfDocument::parse(&bytes, &input.options) { + Ok((doc, warn)) => (doc, warn), + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 3, + data: StatusOrData::::Error(format!("failed to parse PDF: {e}")), + }) + .unwrap_or_default(); + } + }; + + let output = PrintPdfApiReturn { + status: 0, + data: StatusOrData::Ok(PrintPdfParseOutput { + pdf: doc, + warnings: warn, + }), + }; - let pages = pdf - .html2pages(&input.html, opts) - .map_err(|e| PrintPdfApiReturn { - pdf: String::new(), - status: 2, - error: e, - })?; + serde_json::to_string(&output).unwrap_or_default() +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfPageGetResourcesInput { + pub page: PdfPage, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfPageGetResourcesOutput { + /// Images, forms, external content + pub xobjects: Vec, + /// External fonts used on this page + pub fonts: Vec, + /// Layers, including info on this page + pub layers: Vec, +} + +/// Helper function that takes a PDF page and outputs a list of all +/// images IDs / fonts IDs that have to be gathered from the documents +/// resources in order to render this page. 
+#[cfg(feature = "wasm")] +#[allow(non_snake_case)] +#[wasm_bindgen::prelude::wasm_bindgen] +pub fn Pdf_GetResourcesForPage(input: String) -> String { + let input = match serde_json::from_str::(&input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 1, + data: StatusOrData::::Error(format!( + "failed to parse input parameters: {e}" + )), + }) + .unwrap_or_default(); + } + }; + + let output = PrintPdfPageGetResourcesOutput { + xobjects: input.page.get_xobject_ids(), + fonts: input.page.get_external_font_ids(), + layers: input.page.get_layers(), + }; + + serde_json::to_string(&PrintPdfApiReturn { + status: 0, + data: StatusOrData::Ok(output), + }) + .unwrap_or_default() +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfPageToSvgInput { + pub page: PdfPage, + #[serde(default)] + pub resources: PdfResources, + #[serde(default)] + pub options: PdfToSvgOptions, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfPageToSvgOutput { + pub svg: String, +} + +/// Takes a `PdfPage` JS object and outputs the SVG string for that page +#[cfg(feature = "wasm")] +#[allow(non_snake_case)] +#[wasm_bindgen::prelude::wasm_bindgen] +pub fn Pdf_PdfPageToSvg(input: String) -> String { + let input = match serde_json::from_str::(&input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 1, + data: StatusOrData::::Error(format!( + "failed to parse input parameters: {e}" + )), + }) + .unwrap_or_default(); + } + }; + + let svg = input.page.to_svg(&input.resources, &input.options); + + serde_json::to_string(&PrintPdfApiReturn { + status: 0, + data: StatusOrData::Ok(PrintPdfPageToSvgOutput { svg }), + }) + .unwrap_or_default() +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PrintPdfToBytesInput { + pub pdf: PdfDocument, + pub options: PdfSaveOptions, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 
+pub struct PrintPdfToBytesOutput { + pub pdf_base64: String, +} + +/// Takes a `PdfDocument` JS object and returns the base64 PDF bytes +#[cfg(feature = "wasm")] +#[allow(non_snake_case)] +#[wasm_bindgen::prelude::wasm_bindgen] +pub fn Pdf_PdfDocumentToBytes(input: String) -> String { + let input = match serde_json::from_str::(&input) { + Ok(o) => o, + Err(e) => { + return serde_json::to_string(&PrintPdfApiReturn { + status: 1, + data: StatusOrData::::Error(format!( + "failed to parse input parameters: {e}" + )), + }) + .unwrap_or_default(); + } + }; - let pdf = pdf.with_pages(pages).save(&PdfSaveOptions::default()); + let bytes = base64::prelude::BASE64_STANDARD.encode(input.pdf.save(&input.options)); - Ok(PrintPdfApiReturn { - pdf: BASE64_STANDARD.encode(pdf), + serde_json::to_string(&PrintPdfApiReturn { status: 0, - error: String::new(), + data: StatusOrData::Ok(PrintPdfToBytesOutput { pdf_base64: bytes }), }) + .unwrap_or_default() } diff --git a/src/xobject.rs b/src/xobject.rs index 21cffbf..6768407 100644 --- a/src/xobject.rs +++ b/src/xobject.rs @@ -1,5 +1,10 @@ +use std::collections::BTreeMap; + +use lopdf::StringFormat; +use serde_derive::{Deserialize, Serialize}; + use crate::{ - OffsetDateTime, + date::OffsetDateTime, image::RawImage, matrix::CurTransMat, units::{Pt, Px}, @@ -10,13 +15,14 @@ use crate::{ /// Gets constructed similar to the `ExtGState`, then inserted into the `/XObject` dictionary /// on the page. You can instantiate `XObjects` with the `/Do` operator. The `layer.add_xobject()` /// (or better yet, the `layer.add_image()`, `layer.add_form()`) methods will do this for you. 
-#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(tag = "type", content = "data")] pub enum XObject { /// Image XObject, for images Image(RawImage), /// Form XObject, NOT A PDF FORM, this just allows repeatable content /// on a page - Form(Box<FormXObject>), + Form(FormXObject), /// XObject embedded from an external stream /// /// This is mainly used to add XObjects to the resources that the library @@ -58,7 +64,7 @@ pub(crate) fn add_xobject_to_document( } XObject::External(external_xobject) => { use lopdf::Object::Integer; - let mut stream = external_xobject.stream.clone(); + let mut stream = external_xobject.stream.into_lopdf(); if let Some(w) = external_xobject.width { stream .dict @@ -75,18 +81,144 @@ } /// External XObject, invoked by `/Do` graphics operator -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct ExternalXObject { /// External stream of graphics operations - pub stream: lopdf::Stream, + pub stream: ExternalStream, /// Optional width + #[serde(default)] pub width: Option<Px>, /// Optional height + #[serde(default)] pub height: Option<Px>, /// Optional DPI of the object + #[serde(default)] pub dpi: Option<f32>, } +#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] +pub struct ExternalStream { + /// Stream description, for simplicity a simple map, corresponds to PDF dict + pub dict: BTreeMap<String, DictItem>, + /// Stream content + pub content: Vec<u8>, + /// Whether the stream can be compressed + pub compress: bool, +} + +impl ExternalStream { + pub(crate) fn into_lopdf(&self) -> lopdf::Stream { + lopdf::Stream::new(build_dict(&self.dict), self.content.clone()) + .with_compression(self.compress) + } +} + +/// Simplified dict item for external streams +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", tag = "type", content = "data")] +pub enum DictItem { + Array(Vec<DictItem>), + String { data: Vec<u8>, 
literal: bool }, + Bytes(Vec<u8>), + Bool(bool), + Float(f32), + Int(i64), + Real(f32), + Name(Vec<u8>), + Ref { obj: u32, gen: u16 }, + Dict { map: BTreeMap<String, DictItem> }, + Stream { stream: ExternalStream }, + Null, +} + +impl DictItem { + pub fn to_lopdf(&self) -> lopdf::Object { + use lopdf::{Object, StringFormat}; + match self { + DictItem::Array(items) => { + let objs = items.iter().map(|item| item.to_lopdf()).collect(); + Object::Array(objs) + } + DictItem::String { data, literal } => { + let format = if *literal { + StringFormat::Literal + } else { + StringFormat::Hexadecimal + }; + Object::String(data.clone(), format) + } + DictItem::Bytes(data) => { + // Treat bytes as a hexadecimal string. + Object::String(data.clone(), StringFormat::Hexadecimal) + } + DictItem::Bool(b) => Object::Boolean(*b), + DictItem::Float(f) => Object::Real(*f), + DictItem::Int(i) => Object::Integer(*i), + DictItem::Real(f) => Object::Real(*f), + DictItem::Name(name) => Object::Name(name.clone()), + DictItem::Ref { obj, gen } => Object::Reference((*obj, *gen)), + DictItem::Dict { map } => { + let dict = map + .iter() + .map(|(k, v)| (k.as_bytes().to_vec(), v.to_lopdf())) + .collect(); + Object::Dictionary(dict) + } + DictItem::Stream { stream } => { + let stream_obj = stream.into_lopdf(); + Object::Stream(stream_obj) + } + DictItem::Null => Object::Null, + } + } + + pub fn from_lopdf(o: &lopdf::Object) -> Self { + use lopdf::Object; + match o { + Object::Null => DictItem::Null, + Object::Boolean(t) => DictItem::Bool(*t), + Object::Integer(i) => DictItem::Int(*i), + Object::Real(r) => DictItem::Real(*r), + Object::Name(items) => DictItem::Name(items.clone()), + Object::String(items, string_format) => DictItem::String { + data: items.clone(), + literal: *string_format == StringFormat::Literal, + }, + Object::Array(objects) => { + DictItem::Array(objects.iter().map(DictItem::from_lopdf).collect()) + } + Object::Dictionary(dictionary) => DictItem::Dict { + map: dictionary + .iter() + .map(|s| { + ( + 
String::from_utf8_lossy(&s.0).to_string(), + DictItem::from_lopdf(s.1), + ) + }) + .collect(), + }, + Object::Stream(stream) => DictItem::Stream { + stream: ExternalStream { + compress: stream.allows_compression, + content: stream.content.clone(), + dict: stream + .dict + .iter() + .map(|s| { + ( + String::from_utf8_lossy(&s.0).to_string(), + DictItem::from_lopdf(s.1), + ) + }) + .collect(), + }, + }, + Object::Reference((a, b)) => DictItem::Ref { obj: *a, gen: *b }, + } + } +} + /// Describes the format the image bytes are compressed with. #[derive(Debug, PartialEq, Copy, Clone)] pub enum ImageFilter { @@ -105,7 +237,7 @@ pub enum ImageFilter { /// A `FormXObject` is basically a layer-like content stream and can contain anything /// as long as it's a valid strem. A `FormXObject` is intended to be used for reapeated /// content on one page. -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct FormXObject { // /Type /XObject // /Subtype /Form @@ -137,7 +269,7 @@ pub struct FormXObject { /// XObject is required and contains all named resources used by the form XObject. /// These resources are not promoted to the outer content stream’s resource /// dictionary, although that stream’s resource dictionary refers to the form XObject. - pub resources: Option, + pub resources: Option>, /* /Group << dictionary >> */ /// (Optional; PDF 1.4) A group attributes dictionary indicating that the contents of the /// form XObject are to be treated as a group and specifying the attributes of that group @@ -151,15 +283,15 @@ pub struct FormXObject { /// (Optional; PDF 1.4) A reference dictionary identifying a page to be imported from another /// PDF file, and for which the form XObject serves as a proxy (see Section 4.9.3, “Reference /// XObjects”). 
- pub ref_dict: Option, + pub ref_dict: Option>, /* /Metadata [stream] */ /// (Optional; PDF 1.4) A metadata stream containing metadata for the form XObject /// (see Section 10.2.2, “Metadata Streams”). - pub metadata: Option, + pub metadata: Option>, /* /PieceInfo << dictionary >> */ /// (Optional; PDF 1.3) A page-piece dictionary associated with the form XObject /// (see Section 10.4, “Page-Piece Dictionaries”). - pub piece_info: Option, + pub piece_info: Option>, /* /LastModified (date) */ /// (Required if PieceInfo is present; optional otherwise; PDF 1.3) The date and time /// (see Section 3.8.3, “Dates”) when the form XObject’s contents were most recently @@ -184,13 +316,13 @@ pub struct FormXObject { /* /OPI << dictionary >> */ /// (Optional; PDF 1.2) An OPI version dictionary for the form XObject /// (see Section 10.10.6, “Open Prepress Interface (OPI)”). - pub opi: Option, + pub opi: Option>, /// (Optional; PDF 1.5) An optional content group or optional content membership dictionary /// (see Section 4.10, “Optional Content”) specifying the optional content properties for /// the form XObject. Before the form is processed, its visibility is determined based on /// this entry. If it is determined to be invisible, the entire form is skipped, as if there /// were no Do operator to invoke it. 
- pub oc: Option, + pub oc: Option>, /* /Name /MyName */ /// __(Required in PDF 1.0; optional otherwise)__ The name by which this form XObject is /// referenced in the XObject subdictionary of the current resource dictionary @@ -217,7 +349,7 @@ fn form_xobject_to_stream(f: &FormXObject, doc: &mut lopdf::Document) -> lopdf:: } if let Some(res) = f.resources.as_ref() { - dict.set("Resources", res.clone()); + dict.set("Resources", build_dict(res)); } if let Some(g) = f.group.as_ref() { @@ -230,15 +362,15 @@ fn form_xobject_to_stream(f: &FormXObject, doc: &mut lopdf::Document) -> lopdf:: } if let Some(r) = f.ref_dict.as_ref() { - dict.set("Ref", r.clone()); + dict.set("Ref", build_dict(&r)); } if let Some(r) = f.metadata.as_ref() { - dict.set("Metadata", doc.add_object(r.clone())); + dict.set("Metadata", doc.add_object(build_dict(&r))); } if let Some(r) = f.piece_info.as_ref() { - dict.set("PieceInfo", doc.add_object(r.clone())); + dict.set("PieceInfo", doc.add_object(build_dict(&r))); } if let Some(r) = f.last_modified.as_ref() { @@ -252,11 +384,11 @@ fn form_xobject_to_stream(f: &FormXObject, doc: &mut lopdf::Document) -> lopdf:: } if let Some(r) = f.opi.as_ref() { - dict.set("OPI", r.clone()); + dict.set("OPI", build_dict(&r)); } if let Some(r) = f.oc.as_ref() { - dict.set("OC", r.clone()); + dict.set("OC", build_dict(&r)); } if let Some(r) = f.name.as_ref() { @@ -277,7 +409,12 @@ fn form_xobject_to_stream(f: &FormXObject, doc: &mut lopdf::Document) -> lopdf:: stream } -#[derive(Debug, PartialEq, Copy, Clone)] +pub fn build_dict(r: &BTreeMap) -> lopdf::Dictionary { + lopdf::Dictionary::from_iter(r.iter().map(|(k, v)| (k.clone(), v.to_lopdf()))) +} + +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum FormType { /// The only form type ever declared by Adobe /* Integer(1) */ @@ -293,12 +430,13 @@ impl FormType { } /// `/Type /Group`` (PDF reference section 4.9.2) -#[derive(Debug, PartialEq, Copy, Clone)] 
+#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] pub struct GroupXObject { pub grouptype: GroupXObjectType, } -#[derive(Debug, PartialEq, Copy, Clone)] +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum GroupXObjectType { /// Transparency group XObject (currently the only valid GroupXObject type) TransparencyGroup, @@ -314,7 +452,7 @@ impl GroupXObjectType { /// PDF 1.4 and higher /// Contains a PDF file to be embedded in the current PDF -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct ReferenceXObject { /// (Required) The file containing the target document. (?) pub file: Vec, @@ -325,27 +463,33 @@ pub struct ReferenceXObject { } /// TODO, very low priority -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] pub struct PostScriptXObject { /// __(Optional)__ A stream whose contents are to be used in /// place of the PostScript XObject’s stream when the target /// PostScript interpreter is known to support only LanguageLevel 1 #[allow(dead_code)] - level1: Option>, + pub level1: Option>, } /// Transform that is applied immediately before the /// image gets painted. Does not affect anything other /// than the image. 
-#[derive(Debug, Copy, Clone, PartialEq, Default)] +#[derive(Debug, Copy, Clone, Default, PartialEq, Deserialize, Serialize)] pub struct XObjectTransform { + #[serde(default)] pub translate_x: Option, + #[serde(default)] pub translate_y: Option, /// Rotate (clockwise), in degree angles + #[serde(default)] pub rotate: Option, + #[serde(default)] pub scale_x: Option, + #[serde(default)] pub scale_y: Option, /// If set to None, will be set to 300.0 for images + #[serde(default)] pub dpi: Option, } @@ -390,8 +534,8 @@ impl XObjectTransform { transforms } - /// Combines the transformation matrices produced by `get_ctms` (with no width/height adjustment) - /// into one final transformation and returns it in SVG's matrix format. + /// Combines the transformation matrices produced by `get_ctms` (with no width/height + /// adjustment) into one final transformation and returns it in SVG's matrix format. pub fn as_svg_transform(&self) -> String { // Get the list of transformation matrices (using None for the width/height info) let ctms = self.get_ctms(None); @@ -416,9 +560,12 @@ impl XObjectTransform { } } -#[derive(Debug, Copy, Clone, PartialEq, Default)] +#[derive(Debug, Copy, Clone, Default, PartialEq, Serialize, Deserialize)] pub struct XObjectRotation { + #[serde(default)] pub angle_ccw_degrees: f32, + #[serde(default)] pub rotation_center_x: Px, + #[serde(default)] pub rotation_center_y: Px, }