Mirror of https://github.com/foliojs/pdfkit.git (synced 2026-01-25 16:06:44 +00:00)

Commit: a76ab284a8 "Apply prettier formatting"
Parent: 8b087cee41
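The diff below is purely mechanical: Prettier adds a trailing comma after the last entry of multi-line objects, arrays, and argument lists, wraps single arrow-function parameters in parentheses, converts double-quoted strings to single quotes, and re-wraps long expressions at roughly 80 columns. The repository's Prettier configuration is not part of this commit, so the following config file is only a sketch of settings that would produce output like this; the file name and option values are assumptions, not taken from the repo:

// prettier.config.js (assumed name and values; the project's real config may differ)
module.exports = {
  singleQuote: true, // 'string' instead of "string"
  trailingComma: 'all', // trailing commas in objects, arrays, and call arguments
  arrowParens: 'always', // (x) => ... instead of x => ...
  printWidth: 80, // re-wrap long lines
};

A typical invocation would be `npx prettier --write .` from the repository root.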
@@ -16,52 +16,52 @@ const styles = {
   h1: {
     font: 'fonts/Alegreya-Bold.ttf',
     fontSize: 25,
-    padding: 15
+    padding: 15,
   },
   h2: {
     font: 'fonts/Alegreya-Bold.ttf',
     fontSize: 18,
-    padding: 10
+    padding: 10,
   },
   h3: {
     font: 'fonts/Alegreya-Bold.ttf',
     fontSize: 18,
-    padding: 10
+    padding: 10,
   },
   para: {
     font: 'fonts/Merriweather-Regular.ttf',
     fontSize: 10,
-    padding: 10
+    padding: 10,
   },
   code: {
     font: 'fonts/SourceCodePro-Regular.ttf',
-    fontSize: 9
+    fontSize: 9,
   },
   code_block: {
     padding: 10,
-    background: '#2c2c2c'
+    background: '#2c2c2c',
   },
   inlinecode: {
     font: 'fonts/SourceCodePro-Bold.ttf',
-    fontSize: 10
+    fontSize: 10,
   },
   listitem: {
     font: 'fonts/Merriweather-Regular.ttf',
     fontSize: 10,
-    padding: 6
+    padding: 6,
   },
   link: {
     font: 'fonts/Merriweather-Regular.ttf',
     fontSize: 10,
     color: 'blue',
-    underline: true
+    underline: true,
   },
   example: {
     font: 'Helvetica',
     fontSize: 9,
     color: 'black',
-    padding: 10
-  }
+    padding: 10,
+  },
 };

 // syntax highlighting colors
@@ -89,7 +89,7 @@ const colors = {
   quote: '#93a1a1',
   link: '#93a1a1',
   special: '#6c71c4',
-  default: '#002b36'
+  default: '#002b36',
 };

 // shared lorem ipsum text so we don't need to copy it into every example
@@ -135,7 +135,7 @@ class Node {
     const color = colors[style] || colors.default;
     const opts = {
       color,
-      continued: text !== '\n'
+      continued: text !== '\n',
     };

     return this.content.push(new Node(['code', opts, text]));
@@ -164,7 +164,7 @@ class Node {
   }

   // sets the styles on the document for this node
-  setStyle (doc) {
+  setStyle(doc) {
     if (this.style.font) {
       doc.font(this.style.font);
     }
@@ -189,7 +189,7 @@ class Node {
   }

   // renders this node and its subnodes to the document
-  render (doc, continued) {
+  render(doc, continued) {
     let y;
     if (continued == null) {
       continued = false;
@@ -217,7 +217,7 @@ class Node {
       // run the example code with the document
       vm.runInNewContext(this.code, {
         doc,
-        lorem
+        lorem,
       });

       // restore points and styles
@@ -273,7 +273,7 @@ class Node {
       } else {
         fragment.render(
           doc,
-          index < this.content.length - 1 && this.type !== 'bulletlist'
+          index < this.content.length - 1 && this.type !== 'bulletlist',
         );
       }

@@ -302,7 +302,7 @@ const render = (doc, filename) => {
 };

 // renders the title page of the guide
-const renderTitlePage = doc => {
+const renderTitlePage = (doc) => {
   const title = 'PDFKit Guide';
   const author = 'By Devon Govett';
   const version = `Version ${require('../package.json').version}`;
@@ -317,13 +317,13 @@ const renderTitlePage = doc => {
   doc.y -= 10;
   doc.text(author, {
     align: 'center',
-    indent: w - doc.widthOfString(author)
+    indent: w - doc.widthOfString(author),
   });

   doc.font(styles.para.font, 10);
   doc.text(version, {
     align: 'center',
-    indent: w - doc.widthOfString(version)
+    indent: w - doc.widthOfString(version),
   });

   doc.addPage();

@@ -25,14 +25,14 @@ const files = [
   'attachments.md',
   'accessibility.md',
   'table.md',
-  'you_made_it.md'
+  'you_made_it.md',
 ];

 // shared lorem ipsum text so we don't need to copy it into every example
 const lorem =
   'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam in suscipit purus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Vivamus nec hendrerit felis. Morbi aliquam facilisis risus eu lacinia. Sed eu leo in turpis fringilla hendrerit. Ut nec accumsan nisl. Suspendisse rhoncus nisl posuere tortor tempus et dapibus elit porta. Cras leo neque, elementum a rhoncus ut, vestibulum non nibh. Phasellus pretium justo turpis. Etiam vulputate, odio vitae tincidunt ultricies, eros odio dapibus nisi, ut tincidunt lacus arcu eu elit. Aenean velit erat, vehicula eget lacinia ut, dignissim non tellus. Aliquam nec lacus mi, sed vestibulum nunc. Suspendisse potenti. Curabitur vitae sem turpis. Vestibulum sed neque eget dolor dapibus porttitor at sit amet sem. Fusce a turpis lorem. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae;';

-const getNodeName = function(node) {
+const getNodeName = function (node) {
   if (node.length === 3) {
     return node[2];
   }
@@ -49,7 +49,7 @@ const getNodeName = function(node) {
-const extractHeaders = function(tree) {
+const extractHeaders = function (tree) {
@@ -63,7 +63,7 @@ const extractHeaders = function(tree) {
-        title: name
+        title: name,
@@ -72,7 +72,7 @@ const extractHeaders = function(tree) {
-const generateImages = function(tree) {
+const generateImages = function (tree) {
@@ -100,7 +100,7 @@ const generateImages = function(tree) {
-    lorem
+    lorem,
@@ -119,7 +119,7 @@ const generateImages = function(tree) {
-      }
+      },
@@ -135,7 +135,7 @@ for (let file of Array.from(files)) {
-    (m, $1) => `    ${$1.split('\n').join('\n    ')}`
+    (m, $1) => `    ${$1.split('\n').join('\n    ')}`,
@@ -149,7 +149,7 @@ for (let file of Array.from(files)) {
-    content: markdown.toHTML(tree)
+    content: markdown.toHTML(tree),
@@ -14,14 +14,14 @@ ghpages.publish(
       'docs/guide.pdf',
       'examples/browserify/browser.html',
       'examples/browserify/bundle.js',
-      'examples/kitchen-sink.pdf'
+      'examples/kitchen-sink.pdf',
     ],
     add: true,
-    message
+    message,
   },
-  function(err) {
+  function (err) {
     if (err) {
       console.error(err);
     }
-  }
+  },
 );

@@ -187,7 +187,7 @@ class Data {
   }

   write(bytes) {
-    return bytes.map(byte => this.writeByte(byte));
+    return bytes.map((byte) => this.writeByte(byte));
   }
 }

@@ -27,7 +27,7 @@ const WIN_ANSI_MAP = {
   353: 154,
   376: 159,
   381: 142,
-  382: 158
+  382: 158,
 };

 const characters = `\
@@ -123,7 +123,7 @@ class AFMFont {
       this.charWidths[char] = this.glyphWidths[characters[char]];
     }

-    this.bbox = this.attributes['FontBBox'].split(/\s+/).map(e => +e);
+    this.bbox = this.attributes['FontBBox'].split(/\s+/).map((e) => +e);
     this.ascender = +(this.attributes['Ascender'] || 0);
     this.descender = +(this.attributes['Descender'] || 0);
     this.xHeight = +(this.attributes['XHeight'] || 0);
@ -1,6 +1,6 @@
|
||||
import PDFFont from '../font';
|
||||
|
||||
const toHex = function(num) {
|
||||
const toHex = function (num) {
|
||||
return `0000${num.toString(16)}`.slice(-4);
|
||||
};
|
||||
|
||||
@ -151,7 +151,7 @@ class EmbeddedFont extends PDFFont {
|
||||
|
||||
// generate a tag (6 uppercase letters. 17 is the char code offset from '0' to 'A'. 73 will map to 'Z')
|
||||
const tag = [1, 2, 3, 4, 5, 6]
|
||||
.map(i => String.fromCharCode((this.id.charCodeAt(i) || 73) + 17))
|
||||
.map((i) => String.fromCharCode((this.id.charCodeAt(i) || 73) + 17))
|
||||
.join('');
|
||||
const name = tag + '+' + this.font.postscriptName?.replaceAll(' ', '_');
|
||||
|
||||
@ -164,14 +164,14 @@ class EmbeddedFont extends PDFFont {
|
||||
bbox.minX * this.scale,
|
||||
bbox.minY * this.scale,
|
||||
bbox.maxX * this.scale,
|
||||
bbox.maxY * this.scale
|
||||
bbox.maxY * this.scale,
|
||||
],
|
||||
ItalicAngle: this.font.italicAngle,
|
||||
Ascent: this.ascender,
|
||||
Descent: this.descender,
|
||||
CapHeight: (this.font.capHeight || this.font.ascent) * this.scale,
|
||||
XHeight: (this.font.xHeight || 0) * this.scale,
|
||||
StemV: 0
|
||||
StemV: 0,
|
||||
}); // not sure how to calculate this
|
||||
|
||||
if (isCFF) {
|
||||
@ -198,10 +198,10 @@ class EmbeddedFont extends PDFFont {
|
||||
CIDSystemInfo: {
|
||||
Registry: new String('Adobe'),
|
||||
Ordering: new String('Identity'),
|
||||
Supplement: 0
|
||||
Supplement: 0,
|
||||
},
|
||||
FontDescriptor: descriptor,
|
||||
W: [0, this.widths]
|
||||
W: [0, this.widths],
|
||||
};
|
||||
|
||||
if (!isCFF) {
|
||||
@ -219,7 +219,7 @@ class EmbeddedFont extends PDFFont {
|
||||
BaseFont: name,
|
||||
Encoding: 'Identity-H',
|
||||
DescendantFonts: [descendantFont],
|
||||
ToUnicode: this.toUnicodeCmap()
|
||||
ToUnicode: this.toUnicodeCmap(),
|
||||
};
|
||||
|
||||
return this.dictionary.end();
|
||||
@ -255,7 +255,9 @@ class EmbeddedFont extends PDFFont {
|
||||
for (let i = 0; i < chunks; i++) {
|
||||
const start = i * chunkSize;
|
||||
const end = Math.min((i + 1) * chunkSize, entries.length);
|
||||
ranges.push(`<${toHex(start)}> <${toHex(end - 1)}> [${entries.slice(start, end).join(' ')}]`);
|
||||
ranges.push(
|
||||
`<${toHex(start)}> <${toHex(end - 1)}> [${entries.slice(start, end).join(' ')}]`,
|
||||
);
|
||||
}
|
||||
|
||||
cmap.end(`\
|
||||
|
||||
@ -28,7 +28,7 @@ const STANDARD_FONTS = {
|
||||
'Helvetica-BoldOblique'() {
|
||||
return fs.readFileSync(
|
||||
__dirname + '/data/Helvetica-BoldOblique.afm',
|
||||
'utf8'
|
||||
'utf8',
|
||||
);
|
||||
},
|
||||
'Times-Roman'() {
|
||||
@ -48,7 +48,7 @@ const STANDARD_FONTS = {
|
||||
},
|
||||
ZapfDingbats() {
|
||||
return fs.readFileSync(__dirname + '/data/ZapfDingbats.afm', 'utf8');
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
class StandardFont extends PDFFont {
|
||||
@ -64,7 +64,7 @@ class StandardFont extends PDFFont {
|
||||
bbox: this.bbox,
|
||||
lineGap: this.lineGap,
|
||||
xHeight: this.xHeight,
|
||||
capHeight: this.capHeight
|
||||
capHeight: this.capHeight,
|
||||
} = this.font);
|
||||
}
|
||||
|
||||
@ -73,7 +73,7 @@ class StandardFont extends PDFFont {
|
||||
Type: 'Font',
|
||||
BaseFont: this.name,
|
||||
Subtype: 'Type1',
|
||||
Encoding: 'WinAnsiEncoding'
|
||||
Encoding: 'WinAnsiEncoding',
|
||||
};
|
||||
|
||||
return this.dictionary.end();
|
||||
@ -91,7 +91,7 @@ class StandardFont extends PDFFont {
|
||||
yAdvance: 0,
|
||||
xOffset: 0,
|
||||
yOffset: 0,
|
||||
advanceWidth: this.font.widthOfGlyph(glyph)
|
||||
advanceWidth: this.font.widthOfGlyph(glyph),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@ -74,7 +74,7 @@ class PDFGradient {
|
||||
Domain: [0, 1],
|
||||
C0: this.stops[i + 0][1],
|
||||
C1: this.stops[i + 1][1],
|
||||
N: 1
|
||||
N: 1,
|
||||
});
|
||||
|
||||
stops.push(fn);
|
||||
@ -90,7 +90,7 @@ class PDFGradient {
|
||||
Domain: [0, 1],
|
||||
Functions: stops,
|
||||
Bounds: bounds,
|
||||
Encode: encode
|
||||
Encode: encode,
|
||||
});
|
||||
|
||||
fn.end();
|
||||
@ -105,12 +105,12 @@ class PDFGradient {
|
||||
Type: 'Pattern',
|
||||
PatternType: 2,
|
||||
Shading: shader,
|
||||
Matrix: this.matrix.map(number)
|
||||
Matrix: this.matrix.map(number),
|
||||
});
|
||||
|
||||
pattern.end();
|
||||
|
||||
if (this.stops.some(stop => stop[2] < 1)) {
|
||||
if (this.stops.some((stop) => stop[2] < 1)) {
|
||||
let grad = this.opacityGradient();
|
||||
grad._colorSpace = 'DeviceGray';
|
||||
|
||||
@ -130,14 +130,14 @@ class PDFGradient {
|
||||
Group: {
|
||||
Type: 'Group',
|
||||
S: 'Transparency',
|
||||
CS: 'DeviceGray'
|
||||
CS: 'DeviceGray',
|
||||
},
|
||||
Resources: {
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI'],
|
||||
Pattern: {
|
||||
Sh1: grad
|
||||
}
|
||||
}
|
||||
Sh1: grad,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
form.write('/Pattern cs /Sh1 scn');
|
||||
@ -148,8 +148,8 @@ class PDFGradient {
|
||||
SMask: {
|
||||
Type: 'Mask',
|
||||
S: 'Luminosity',
|
||||
G: form
|
||||
}
|
||||
G: form,
|
||||
},
|
||||
});
|
||||
|
||||
gstate.end();
|
||||
@ -165,12 +165,12 @@ class PDFGradient {
|
||||
Resources: {
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI'],
|
||||
Pattern: {
|
||||
Sh1: pattern
|
||||
Sh1: pattern,
|
||||
},
|
||||
ExtGState: {
|
||||
Gs1: gstate
|
||||
}
|
||||
}
|
||||
Gs1: gstate,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
opacityPattern.write('/Gs1 gs /Pattern cs /Sh1 scn');
|
||||
@ -194,7 +194,7 @@ class PDFGradient {
|
||||
m0 * m21 + m2 * m22,
|
||||
m1 * m21 + m3 * m22,
|
||||
m0 * dx + m2 * dy + m4,
|
||||
m1 * dx + m3 * dy + m5
|
||||
m1 * dx + m3 * dy + m5,
|
||||
];
|
||||
|
||||
if (!this.embedded || m.join(' ') !== this.matrix.join(' ')) {
|
||||
@ -221,7 +221,7 @@ class PDFLinearGradient extends PDFGradient {
|
||||
ColorSpace: this._colorSpace,
|
||||
Coords: [this.x1, this.y1, this.x2, this.y2],
|
||||
Function: fn,
|
||||
Extend: [true, true]
|
||||
Extend: [true, true],
|
||||
});
|
||||
}
|
||||
|
||||
@ -248,7 +248,7 @@ class PDFRadialGradient extends PDFGradient {
|
||||
ColorSpace: this._colorSpace,
|
||||
Coords: [this.x1, this.y1, this.r1, this.x2, this.y2, this.r2],
|
||||
Function: fn,
|
||||
Extend: [true, true]
|
||||
Extend: [true, true],
|
||||
});
|
||||
}
|
||||
|
||||
@ -260,7 +260,7 @@ class PDFRadialGradient extends PDFGradient {
|
||||
this.r1,
|
||||
this.x2,
|
||||
this.y2,
|
||||
this.r2
|
||||
this.r2,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,27 +1,14 @@
 import exif from 'jpeg-exif';

 const MARKERS = [
-  0xffc0,
-  0xffc1,
-  0xffc2,
-  0xffc3,
-  0xffc5,
-  0xffc6,
-  0xffc7,
-  0xffc8,
-  0xffc9,
-  0xffca,
-  0xffcb,
-  0xffcc,
-  0xffcd,
-  0xffce,
-  0xffcf
+  0xffc0, 0xffc1, 0xffc2, 0xffc3, 0xffc5, 0xffc6, 0xffc7, 0xffc8, 0xffc9,
+  0xffca, 0xffcb, 0xffcc, 0xffcd, 0xffce, 0xffcf,
 ];

 const COLOR_SPACE_MAP = {
   1: 'DeviceGray',
   3: 'DeviceRGB',
-  4: 'DeviceCMYK'
+  4: 'DeviceCMYK',
 };

 class JPEG {
@@ -76,7 +63,7 @@ class JPEG {
       Width: this.width,
       Height: this.height,
       ColorSpace: this.colorSpace,
-      Filter: 'DCTDecode'
+      Filter: 'DCTDecode',
     });

     // add extra decode params for CMYK images. By swapping the
@ -28,7 +28,7 @@ class PNGImage {
|
||||
BitsPerComponent: hasAlphaChannel ? 8 : this.image.bits,
|
||||
Width: this.width,
|
||||
Height: this.height,
|
||||
Filter: 'FlateDecode'
|
||||
Filter: 'FlateDecode',
|
||||
});
|
||||
|
||||
if (!hasAlphaChannel) {
|
||||
@ -36,7 +36,7 @@ class PNGImage {
|
||||
Predictor: isInterlaced ? 1 : 15,
|
||||
Colors: this.image.colors,
|
||||
BitsPerComponent: this.image.bits,
|
||||
Columns: this.width
|
||||
Columns: this.width,
|
||||
});
|
||||
|
||||
this.obj.data['DecodeParms'] = params;
|
||||
@ -55,7 +55,7 @@ class PNGImage {
|
||||
'Indexed',
|
||||
'DeviceRGB',
|
||||
this.image.palette.length / 3 - 1,
|
||||
palette
|
||||
palette,
|
||||
];
|
||||
}
|
||||
|
||||
@ -106,7 +106,7 @@ class PNGImage {
|
||||
BitsPerComponent: 8,
|
||||
Filter: 'FlateDecode',
|
||||
ColorSpace: 'DeviceGray',
|
||||
Decode: [0, 1]
|
||||
Decode: [0, 1],
|
||||
});
|
||||
|
||||
sMask.end(this.alphaChannel);
|
||||
@ -122,7 +122,7 @@ class PNGImage {
|
||||
}
|
||||
|
||||
splitAlphaChannel() {
|
||||
return this.image.decodePixels(pixels => {
|
||||
return this.image.decodePixels((pixels) => {
|
||||
let a, p;
|
||||
const colorCount = this.image.colors;
|
||||
const pixelCount = this.width * this.height;
|
||||
@ -150,7 +150,7 @@ class PNGImage {
|
||||
|
||||
loadIndexedAlphaChannel() {
|
||||
const transparency = this.image.transparency.indexed;
|
||||
return this.image.decodePixels(pixels => {
|
||||
return this.image.decodePixels((pixels) => {
|
||||
const alphaChannel = Buffer.alloc(this.width * this.height);
|
||||
|
||||
let i = 0;
|
||||
@ -164,7 +164,7 @@ class PNGImage {
|
||||
}
|
||||
|
||||
decodeData() {
|
||||
this.image.decodePixels(pixels => {
|
||||
this.image.decodePixels((pixels) => {
|
||||
this.imgData = zlib.deflateSync(pixels);
|
||||
this.finalize();
|
||||
});
|
||||
|
||||
@ -1,35 +1,37 @@
|
||||
|
||||
class PDFMetadata {
|
||||
constructor() {
|
||||
this._metadata = `
|
||||
constructor() {
|
||||
this._metadata = `
|
||||
<?xpacket begin="\ufeff" id="W5M0MpCehiHzreSzNTczkc9d"?>
|
||||
<x:xmpmeta xmlns:x="adobe:ns:meta/">
|
||||
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
|
||||
`;
|
||||
}
|
||||
|
||||
_closeTags() {
|
||||
this._metadata = this._metadata.concat(`
|
||||
}
|
||||
|
||||
_closeTags() {
|
||||
this._metadata = this._metadata.concat(`
|
||||
</rdf:RDF>
|
||||
</x:xmpmeta>
|
||||
<?xpacket end="w"?>
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
append(xml, newline=true) {
|
||||
this._metadata = this._metadata.concat(xml);
|
||||
if (newline)
|
||||
this._metadata = this._metadata.concat('\n');
|
||||
}
|
||||
append(xml, newline = true) {
|
||||
this._metadata = this._metadata.concat(xml);
|
||||
if (newline) this._metadata = this._metadata.concat('\n');
|
||||
}
|
||||
|
||||
getXML() { return this._metadata; }
|
||||
getXML() {
|
||||
return this._metadata;
|
||||
}
|
||||
|
||||
getLength() { return this._metadata.length; }
|
||||
getLength() {
|
||||
return this._metadata.length;
|
||||
}
|
||||
|
||||
end() {
|
||||
this._closeTags();
|
||||
this._metadata = this._metadata.trim();
|
||||
}
|
||||
end() {
|
||||
this._closeTags();
|
||||
this._metadata = this._metadata.trim();
|
||||
}
|
||||
}
|
||||
|
||||
export default PDFMetadata;
|
||||
export default PDFMetadata;
|
||||
|
||||
@ -11,12 +11,12 @@ const FIELD_FLAGS = {
|
||||
edit: 0x40000,
|
||||
sort: 0x80000,
|
||||
multiSelect: 0x200000,
|
||||
noSpell: 0x400000
|
||||
noSpell: 0x400000,
|
||||
};
|
||||
const FIELD_JUSTIFY = {
|
||||
left: 0,
|
||||
center: 1,
|
||||
right: 2
|
||||
right: 2,
|
||||
};
|
||||
const VALUE_MAP = { value: 'V', defaultValue: 'DV' };
|
||||
const FORMAT_SPECIAL = {
|
||||
@ -24,7 +24,7 @@ const FORMAT_SPECIAL = {
|
||||
zipPlus4: '1',
|
||||
zip4: '1',
|
||||
phone: '2',
|
||||
ssn: '3'
|
||||
ssn: '3',
|
||||
};
|
||||
const FORMAT_DEFAULT = {
|
||||
number: {
|
||||
@ -32,12 +32,12 @@ const FORMAT_DEFAULT = {
|
||||
sepComma: false,
|
||||
negStyle: 'MinusBlack',
|
||||
currency: '',
|
||||
currencyPrepend: true
|
||||
currencyPrepend: true,
|
||||
},
|
||||
percent: {
|
||||
nDec: 0,
|
||||
sepComma: false
|
||||
}
|
||||
sepComma: false,
|
||||
},
|
||||
};
|
||||
|
||||
export default {
|
||||
@ -51,7 +51,7 @@ export default {
|
||||
}
|
||||
this._acroform = {
|
||||
fonts: {},
|
||||
defaultFont: this._font.name
|
||||
defaultFont: this._font.name,
|
||||
};
|
||||
this._acroform.fonts[this._font.id] = this._font.ref();
|
||||
|
||||
@ -60,8 +60,8 @@ export default {
|
||||
NeedAppearances: true,
|
||||
DA: new String(`/${this._font.id} 0 Tf 0 g`),
|
||||
DR: {
|
||||
Font: {}
|
||||
}
|
||||
Font: {},
|
||||
},
|
||||
};
|
||||
data.DR.Font[this._font.id] = this._font.ref();
|
||||
const AcroForm = this.ref(data);
|
||||
@ -81,10 +81,10 @@ export default {
|
||||
throw new Error('No fonts specified for PDF form');
|
||||
}
|
||||
let fontDict = this._root.data.AcroForm.data.DR.Font;
|
||||
Object.keys(this._acroform.fonts).forEach(name => {
|
||||
Object.keys(this._acroform.fonts).forEach((name) => {
|
||||
fontDict[name] = this._acroform.fonts[name];
|
||||
});
|
||||
this._root.data.AcroForm.data.Fields.forEach(fieldRef => {
|
||||
this._root.data.AcroForm.data.Fields.forEach((fieldRef) => {
|
||||
this._endChild(fieldRef);
|
||||
});
|
||||
this._root.data.AcroForm.end();
|
||||
@ -94,7 +94,7 @@ export default {
|
||||
|
||||
_endChild(ref) {
|
||||
if (Array.isArray(ref.data.Kids)) {
|
||||
ref.data.Kids.forEach(childRef => {
|
||||
ref.data.Kids.forEach((childRef) => {
|
||||
this._endChild(childRef);
|
||||
});
|
||||
ref.end();
|
||||
@ -181,7 +181,7 @@ export default {
|
||||
_fieldDict(name, type, options = {}) {
|
||||
if (!this._acroform) {
|
||||
throw new Error(
|
||||
'Call document.initForm() method before adding form elements to document'
|
||||
'Call document.initForm() method before adding form elements to document',
|
||||
);
|
||||
}
|
||||
let opts = Object.assign({}, options);
|
||||
@ -253,8 +253,8 @@ export default {
|
||||
'"' + p.negStyle + '"',
|
||||
'null',
|
||||
'"' + p.currency + '"',
|
||||
String(p.currencyPrepend)
|
||||
].join(',')
|
||||
String(p.currencyPrepend),
|
||||
].join(','),
|
||||
);
|
||||
} else if (f.type === 'percent') {
|
||||
let p = Object.assign({}, FORMAT_DEFAULT.percent, f);
|
||||
@ -264,11 +264,11 @@ export default {
|
||||
opts.AA = opts.AA ? opts.AA : {};
|
||||
opts.AA.K = {
|
||||
S: 'JavaScript',
|
||||
JS: new String(`${fnKeystroke}(${params});`)
|
||||
JS: new String(`${fnKeystroke}(${params});`),
|
||||
};
|
||||
opts.AA.F = {
|
||||
S: 'JavaScript',
|
||||
JS: new String(`${fnFormat}(${params});`)
|
||||
JS: new String(`${fnFormat}(${params});`),
|
||||
};
|
||||
}
|
||||
delete opts.format;
|
||||
@ -297,7 +297,7 @@ export default {
|
||||
|
||||
_resolveFlags(options) {
|
||||
let result = 0;
|
||||
Object.keys(options).forEach(key => {
|
||||
Object.keys(options).forEach((key) => {
|
||||
if (FIELD_FLAGS[key]) {
|
||||
if (options[key]) {
|
||||
result |= FIELD_FLAGS[key];
|
||||
@ -367,13 +367,13 @@ export default {
|
||||
options.Opt = select;
|
||||
}
|
||||
|
||||
Object.keys(VALUE_MAP).forEach(key => {
|
||||
Object.keys(VALUE_MAP).forEach((key) => {
|
||||
if (options[key] !== undefined) {
|
||||
options[VALUE_MAP[key]] = options[key];
|
||||
delete options[key];
|
||||
}
|
||||
});
|
||||
['V', 'DV'].forEach(key => {
|
||||
['V', 'DV'].forEach((key) => {
|
||||
if (typeof options[key] === 'string') {
|
||||
options[key] = new String(options[key]);
|
||||
}
|
||||
@ -388,5 +388,5 @@ export default {
|
||||
delete options.label;
|
||||
}
|
||||
return options;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -47,7 +47,7 @@ export default {
|
||||
options.Subtype = 'Link';
|
||||
options.A = this.ref({
|
||||
S: 'GoTo',
|
||||
D: new String(name)
|
||||
D: new String(name),
|
||||
});
|
||||
options.A.end();
|
||||
return this.annotate(x, y, w, h, options);
|
||||
@ -62,7 +62,7 @@ export default {
|
||||
if (url >= 0 && url < pages.Kids.length) {
|
||||
options.A = this.ref({
|
||||
S: 'GoTo',
|
||||
D: [pages.Kids[url], 'XYZ', null, null, null]
|
||||
D: [pages.Kids[url], 'XYZ', null, null, null],
|
||||
});
|
||||
options.A.end();
|
||||
} else {
|
||||
@ -72,7 +72,7 @@ export default {
|
||||
// Link to an external url
|
||||
options.A = this.ref({
|
||||
S: 'URI',
|
||||
URI: new String(url)
|
||||
URI: new String(url),
|
||||
});
|
||||
options.A.end();
|
||||
}
|
||||
@ -133,10 +133,7 @@ export default {
|
||||
|
||||
fileAnnotation(x, y, w, h, file = {}, options = {}) {
|
||||
// create hidden file
|
||||
const filespec = this.file(
|
||||
file.src,
|
||||
Object.assign({ hidden: true }, file)
|
||||
);
|
||||
const filespec = this.file(file.src, Object.assign({ hidden: true }, file));
|
||||
|
||||
options.Subtype = 'FileAttachment';
|
||||
options.FS = filespec;
|
||||
@ -167,5 +164,5 @@ export default {
|
||||
y2 = m1 * x2 + m3 * y2 + m5;
|
||||
|
||||
return [x1, y1, x2, y2];
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -21,7 +21,7 @@ export default {
|
||||
|
||||
const refBody = {
|
||||
Type: 'EmbeddedFile',
|
||||
Params: {}
|
||||
Params: {},
|
||||
};
|
||||
let data;
|
||||
|
||||
@ -66,7 +66,7 @@ export default {
|
||||
|
||||
// add checksum and size information
|
||||
const checksum = CryptoJS.MD5(
|
||||
CryptoJS.lib.WordArray.create(new Uint8Array(data))
|
||||
CryptoJS.lib.WordArray.create(new Uint8Array(data)),
|
||||
);
|
||||
refBody.Params.CheckSum = new String(checksum);
|
||||
refBody.Params.Size = data.byteLength;
|
||||
@ -90,7 +90,7 @@ export default {
|
||||
AFRelationship: options.relationship,
|
||||
F: new String(options.name),
|
||||
EF: { F: ref },
|
||||
UF: new String(options.name)
|
||||
UF: new String(options.name),
|
||||
};
|
||||
if (options.description) {
|
||||
fileSpecBody.Desc = new String(options.description);
|
||||
@ -110,7 +110,7 @@ export default {
|
||||
}
|
||||
|
||||
return filespec;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/** check two embedded file metadata objects for equality */
|
||||
|
||||
@ -3,13 +3,12 @@ Markings mixin - support marked content sequences in content streams
|
||||
By Ben Schmidt
|
||||
*/
|
||||
|
||||
import PDFStructureElement from "../structure_element";
|
||||
import PDFStructureContent from "../structure_content";
|
||||
import PDFNumberTree from "../number_tree";
|
||||
import PDFObject from "../object";
|
||||
import PDFStructureElement from '../structure_element';
|
||||
import PDFStructureContent from '../structure_content';
|
||||
import PDFNumberTree from '../number_tree';
|
||||
import PDFObject from '../object';
|
||||
|
||||
export default {
|
||||
|
||||
initMarkings(options) {
|
||||
this.structChildren = [];
|
||||
|
||||
@ -50,11 +49,17 @@ export default {
|
||||
dictionary.Type = options.type;
|
||||
}
|
||||
if (Array.isArray(options.bbox)) {
|
||||
dictionary.BBox = [options.bbox[0], this.page.height - options.bbox[3],
|
||||
options.bbox[2], this.page.height - options.bbox[1]];
|
||||
dictionary.BBox = [
|
||||
options.bbox[0],
|
||||
this.page.height - options.bbox[3],
|
||||
options.bbox[2],
|
||||
this.page.height - options.bbox[1],
|
||||
];
|
||||
}
|
||||
if (Array.isArray(options.attached) &&
|
||||
options.attached.every(val => typeof val === 'string')) {
|
||||
if (
|
||||
Array.isArray(options.attached) &&
|
||||
options.attached.every((val) => typeof val === 'string')
|
||||
) {
|
||||
dictionary.Attached = options.attached;
|
||||
}
|
||||
}
|
||||
@ -78,7 +83,9 @@ export default {
|
||||
},
|
||||
|
||||
markStructureContent(tag, options = {}) {
|
||||
const pageStructParents = this.getStructParentTree().get(this.page.structParentTreeKey);
|
||||
const pageStructParents = this.getStructParentTree().get(
|
||||
this.page.structParentTreeKey,
|
||||
);
|
||||
const mcid = pageStructParents.length;
|
||||
pageStructParents.push(null);
|
||||
|
||||
@ -115,7 +122,10 @@ export default {
|
||||
pageMarkings.forEach((marking) => {
|
||||
if (marking.structContent) {
|
||||
const structContent = marking.structContent;
|
||||
const newStructContent = this.markStructureContent(marking.tag, marking.options);
|
||||
const newStructContent = this.markStructureContent(
|
||||
marking.tag,
|
||||
marking.options,
|
||||
);
|
||||
structContent.push(newStructContent);
|
||||
this.page.markings.slice(-1)[0].structContent = structContent;
|
||||
} else {
|
||||
@ -147,7 +157,7 @@ export default {
|
||||
this._root.data.StructTreeRoot = this.ref({
|
||||
Type: 'StructTreeRoot',
|
||||
ParentTree: new PDFNumberTree(),
|
||||
ParentTreeNextKey: 0
|
||||
ParentTreeNextKey: 0,
|
||||
});
|
||||
}
|
||||
return this._root.data.StructTreeRoot;
|
||||
@ -176,6 +186,5 @@ export default {
|
||||
if (this._root.data.MarkInfo) {
|
||||
this._root.data.MarkInfo.end();
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
};
|
||||
|
||||
@ -1,94 +1,101 @@
|
||||
import PDFMetadata from "../metadata"
|
||||
import PDFMetadata from '../metadata';
|
||||
|
||||
export default {
|
||||
initMetadata() {
|
||||
this.metadata = new PDFMetadata();
|
||||
},
|
||||
initMetadata() {
|
||||
this.metadata = new PDFMetadata();
|
||||
},
|
||||
|
||||
appendXML(xml, newline=true) { this.metadata.append(xml,newline); },
|
||||
appendXML(xml, newline = true) {
|
||||
this.metadata.append(xml, newline);
|
||||
},
|
||||
|
||||
_addInfo() {
|
||||
this.appendXML(`
|
||||
_addInfo() {
|
||||
this.appendXML(`
|
||||
<rdf:Description rdf:about="" xmlns:xmp="http://ns.adobe.com/xap/1.0/">
|
||||
<xmp:CreateDate>${this.info.CreationDate.toISOString().split('.')[0]+"Z"}</xmp:CreateDate>
|
||||
<xmp:CreateDate>${this.info.CreationDate.toISOString().split('.')[0] + 'Z'}</xmp:CreateDate>
|
||||
<xmp:CreatorTool>${this.info.Creator}</xmp:CreatorTool>
|
||||
</rdf:Description>
|
||||
`
|
||||
);
|
||||
`);
|
||||
|
||||
if (this.info.Title || this.info.Author || this.info.Subject) {
|
||||
this.appendXML(`
|
||||
if (this.info.Title || this.info.Author || this.info.Subject) {
|
||||
this.appendXML(`
|
||||
<rdf:Description rdf:about="" xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
`);
|
||||
|
||||
if (this.info.Title) {
|
||||
this.appendXML(`
|
||||
|
||||
if (this.info.Title) {
|
||||
this.appendXML(`
|
||||
<dc:title>
|
||||
<rdf:Alt>
|
||||
<rdf:li xml:lang="x-default">${this.info.Title}</rdf:li>
|
||||
</rdf:Alt>
|
||||
</dc:title>
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.info.Author) {
|
||||
this.appendXML(`
|
||||
if (this.info.Author) {
|
||||
this.appendXML(`
|
||||
<dc:creator>
|
||||
<rdf:Seq>
|
||||
<rdf:li>${this.info.Author}</rdf:li>
|
||||
</rdf:Seq>
|
||||
</dc:creator>
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.info.Subject) {
|
||||
this.appendXML(`
|
||||
if (this.info.Subject) {
|
||||
this.appendXML(`
|
||||
<dc:description>
|
||||
<rdf:Alt>
|
||||
<rdf:li xml:lang="x-default">${this.info.Subject}</rdf:li>
|
||||
</rdf:Alt>
|
||||
</dc:description>
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
this.appendXML(`
|
||||
this.appendXML(`
|
||||
</rdf:Description>
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
this.appendXML(`
|
||||
this.appendXML(
|
||||
`
|
||||
<rdf:Description rdf:about="" xmlns:pdf="http://ns.adobe.com/pdf/1.3/">
|
||||
<pdf:Producer>${this.info.Creator}</pdf:Producer>`, false);
|
||||
<pdf:Producer>${this.info.Creator}</pdf:Producer>`,
|
||||
false,
|
||||
);
|
||||
|
||||
if (this.info.Keywords) {
|
||||
this.appendXML(`
|
||||
<pdf:Keywords>${this.info.Keywords}</pdf:Keywords>`, false);
|
||||
}
|
||||
if (this.info.Keywords) {
|
||||
this.appendXML(
|
||||
`
|
||||
<pdf:Keywords>${this.info.Keywords}</pdf:Keywords>`,
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
this.appendXML(`
|
||||
this.appendXML(`
|
||||
</rdf:Description>
|
||||
`);
|
||||
},
|
||||
},
|
||||
|
||||
endMetadata() {
|
||||
this._addInfo();
|
||||
|
||||
this.metadata.end();
|
||||
endMetadata() {
|
||||
this._addInfo();
|
||||
|
||||
/*
|
||||
this.metadata.end();
|
||||
|
||||
/*
|
||||
Metadata was introduced in PDF 1.4, so adding it to 1.3
|
||||
will likely only take up more space.
|
||||
*/
|
||||
if (this.version != 1.3) {
|
||||
this.metadataRef = this.ref({
|
||||
length: this.metadata.getLength(),
|
||||
Type: 'Metadata',
|
||||
Subtype: 'XML'
|
||||
});
|
||||
this.metadataRef.compress = false;
|
||||
this.metadataRef.write(Buffer.from(this.metadata.getXML(), 'utf-8'));
|
||||
this.metadataRef.end();
|
||||
this._root.data.Metadata = this.metadataRef;
|
||||
}
|
||||
if (this.version != 1.3) {
|
||||
this.metadataRef = this.ref({
|
||||
length: this.metadata.getLength(),
|
||||
Type: 'Metadata',
|
||||
Subtype: 'XML',
|
||||
});
|
||||
this.metadataRef.compress = false;
|
||||
this.metadataRef.write(Buffer.from(this.metadata.getXML(), 'utf-8'));
|
||||
this.metadataRef.end();
|
||||
this._root.data.Metadata = this.metadataRef;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -20,11 +20,13 @@ export default {
|
||||
},
|
||||
|
||||
_addColorOutputIntent() {
|
||||
const iccProfile = fs.readFileSync(`${__dirname}/data/sRGB_IEC61966_2_1.icc`);
|
||||
const iccProfile = fs.readFileSync(
|
||||
`${__dirname}/data/sRGB_IEC61966_2_1.icc`,
|
||||
);
|
||||
|
||||
const colorProfileRef = this.ref({
|
||||
Length: iccProfile.length,
|
||||
N: 3
|
||||
N: 3,
|
||||
});
|
||||
colorProfileRef.write(iccProfile);
|
||||
colorProfileRef.end();
|
||||
@ -34,7 +36,7 @@ export default {
|
||||
S: 'GTS_PDFA1',
|
||||
Info: new String('sRGB IEC61966-2.1'),
|
||||
OutputConditionIdentifier: new String('sRGB IEC61966-2.1'),
|
||||
DestOutputProfile: colorProfileRef
|
||||
DestOutputProfile: colorProfileRef,
|
||||
});
|
||||
intentRef.end();
|
||||
|
||||
@ -52,5 +54,5 @@ export default {
|
||||
|
||||
_addPdfaMetadata() {
|
||||
this.appendXML(this._getPdfaid());
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -1,24 +1,21 @@
|
||||
|
||||
export default {
|
||||
initPDFUA() {
|
||||
this.subset = 1;
|
||||
},
|
||||
|
||||
initPDFUA() {
|
||||
this.subset = 1;
|
||||
},
|
||||
endSubset() {
|
||||
this._addPdfuaMetadata();
|
||||
},
|
||||
|
||||
endSubset() {
|
||||
this._addPdfuaMetadata();
|
||||
},
|
||||
_addPdfuaMetadata() {
|
||||
this.appendXML(this._getPdfuaid());
|
||||
},
|
||||
|
||||
_addPdfuaMetadata() {
|
||||
this.appendXML(this._getPdfuaid());
|
||||
},
|
||||
|
||||
_getPdfuaid() {
|
||||
return `
|
||||
_getPdfuaid() {
|
||||
return `
|
||||
<rdf:Description xmlns:pdfuaid="http://www.aiim.org/pdfua/ns/id/" rdf:about="">
|
||||
<pdfuaid:part>${this.subset}</pdfuaid:part>
|
||||
</rdf:Description>
|
||||
`;
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -2,29 +2,28 @@ import PDFA from './pdfa';
|
||||
import PDFUA from './pdfua';
|
||||
|
||||
export default {
|
||||
_importSubset(subset) {
|
||||
Object.assign(this, subset)
|
||||
},
|
||||
_importSubset(subset) {
|
||||
Object.assign(this, subset);
|
||||
},
|
||||
|
||||
initSubset(options) {
|
||||
|
||||
switch (options.subset) {
|
||||
case 'PDF/A-1':
|
||||
case 'PDF/A-1a':
|
||||
case 'PDF/A-1b':
|
||||
case 'PDF/A-2':
|
||||
case 'PDF/A-2a':
|
||||
case 'PDF/A-2b':
|
||||
case 'PDF/A-3':
|
||||
case 'PDF/A-3a':
|
||||
case 'PDF/A-3b':
|
||||
this._importSubset(PDFA);
|
||||
this.initPDFA(options.subset);
|
||||
break;
|
||||
case 'PDF/UA':
|
||||
this._importSubset(PDFUA);
|
||||
this.initPDFUA();
|
||||
break;
|
||||
}
|
||||
initSubset(options) {
|
||||
switch (options.subset) {
|
||||
case 'PDF/A-1':
|
||||
case 'PDF/A-1a':
|
||||
case 'PDF/A-1b':
|
||||
case 'PDF/A-2':
|
||||
case 'PDF/A-2a':
|
||||
case 'PDF/A-2b':
|
||||
case 'PDF/A-3':
|
||||
case 'PDF/A-3a':
|
||||
case 'PDF/A-3b':
|
||||
this._importSubset(PDFA);
|
||||
this.initPDFA(options.subset);
|
||||
break;
|
||||
case 'PDF/UA':
|
||||
this._importSubset(PDFUA);
|
||||
this.initPDFUA();
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@
 PDFNameTree - represents a name tree object
 */

-import PDFTree from "./tree";
+import PDFTree from './tree';

 class PDFNameTree extends PDFTree {
   _compareKeys(a, b) {
@@ -10,7 +10,7 @@ class PDFNameTree extends PDFTree {
   }

   _keysName() {
-    return "Names";
+    return 'Names';
   }

   _dataForKey(k) {
@@ -2,7 +2,7 @@
 PDFNumberTree - represents a number tree object
 */

-import PDFTree from "./tree";
+import PDFTree from './tree';

 class PDFNumberTree extends PDFTree {
   _compareKeys(a, b) {
@@ -10,7 +10,7 @@ class PDFNumberTree extends PDFTree {
   }

   _keysName() {
-    return "Nums";
+    return 'Nums';
   }

   _dataForKey(k) {
@ -18,11 +18,11 @@ const escapable = {
|
||||
'\f': '\\f',
|
||||
'\\': '\\\\',
|
||||
'(': '\\(',
|
||||
')': '\\)'
|
||||
')': '\\)',
|
||||
};
|
||||
|
||||
// Convert little endian UTF-16 to big endian
|
||||
const swapBytes = function(buff) {
|
||||
const swapBytes = function (buff) {
|
||||
const l = buff.length;
|
||||
if (l & 0x01) {
|
||||
throw new Error('Buffer length must be even');
|
||||
@ -71,7 +71,7 @@ class PDFObject {
|
||||
}
|
||||
|
||||
// Escape characters as required by the spec
|
||||
string = string.replace(escapableRe, c => escapable[c]);
|
||||
string = string.replace(escapableRe, (c) => escapable[c]);
|
||||
|
||||
return `(${string})`;
|
||||
|
||||
@ -99,12 +99,14 @@ class PDFObject {
|
||||
string = encryptFn(Buffer.from(string, 'ascii')).toString('binary');
|
||||
|
||||
// Escape characters as required by the spec
|
||||
string = string.replace(escapableRe, c => escapable[c]);
|
||||
string = string.replace(escapableRe, (c) => escapable[c]);
|
||||
}
|
||||
|
||||
return `(${string})`;
|
||||
} else if (Array.isArray(object)) {
|
||||
const items = object.map(e => PDFObject.convert(e, encryptFn)).join(' ');
|
||||
const items = object
|
||||
.map((e) => PDFObject.convert(e, encryptFn))
|
||||
.join(' ');
|
||||
return `[${items}]`;
|
||||
} else if ({}.toString.call(object) === '[object Object]') {
|
||||
const out = ['<<'];
|
||||
|
||||
@ -26,7 +26,7 @@ class PDFOutline {
|
||||
this.dictionary,
|
||||
title,
|
||||
this.document.page,
|
||||
options
|
||||
options,
|
||||
);
|
||||
this.children.push(result);
|
||||
|
||||
|
||||
lib/page.js: 10 changed lines
@ -65,7 +65,7 @@ const SIZES = {
|
||||
FOLIO: [612.0, 936.0],
|
||||
LEGAL: [612.0, 1008.0],
|
||||
LETTER: [612.0, 792.0],
|
||||
TABLOID: [792.0, 1224.0]
|
||||
TABLOID: [792.0, 1224.0],
|
||||
};
|
||||
|
||||
class PDFPage {
|
||||
@ -92,12 +92,12 @@ class PDFPage {
|
||||
this.margins = normalizeSides(
|
||||
options.margin ?? options.margins,
|
||||
DEFAULT_MARGINS,
|
||||
x => document.sizeToPoint(x, 0, this)
|
||||
)
|
||||
(x) => document.sizeToPoint(x, 0, this),
|
||||
);
|
||||
|
||||
// Initialize the Font, XObject, and ExtGState dictionaries
|
||||
this.resources = this.document.ref({
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI']
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI'],
|
||||
});
|
||||
|
||||
// The page dictionary
|
||||
@ -106,7 +106,7 @@ class PDFPage {
|
||||
Parent: this.document._root.data.Pages,
|
||||
MediaBox: [0, 0, this.width, this.height],
|
||||
Contents: this.content,
|
||||
Resources: this.resources
|
||||
Resources: this.resources,
|
||||
});
|
||||
|
||||
this.markings = [];
|
||||
|
||||
lib/path.js: 20 changed lines
@ -22,10 +22,10 @@ const parameters = {
|
||||
V: 1,
|
||||
v: 1,
|
||||
Z: 0,
|
||||
z: 0
|
||||
z: 0,
|
||||
};
|
||||
|
||||
const parse = function(path) {
|
||||
const parse = function (path) {
|
||||
let cmd;
|
||||
const ret = [];
|
||||
let args = [];
|
||||
@ -110,7 +110,7 @@ const parse = function(path) {
|
||||
return ret;
|
||||
};
|
||||
|
||||
const apply = function(commands, doc) {
|
||||
const apply = function (commands, doc) {
|
||||
// current point, control point, and subpath starting point
|
||||
cx = cy = px = py = sx = sy = 0;
|
||||
|
||||
@ -157,7 +157,7 @@ const runners = {
|
||||
a[2] + cx,
|
||||
a[3] + cy,
|
||||
a[4] + cx,
|
||||
a[5] + cy
|
||||
a[5] + cy,
|
||||
);
|
||||
px = cx + a[2];
|
||||
py = cy + a[3];
|
||||
@ -190,7 +190,7 @@ const runners = {
|
||||
cx + a[0],
|
||||
cy + a[1],
|
||||
cx + a[2],
|
||||
cy + a[3]
|
||||
cy + a[3],
|
||||
);
|
||||
px = cx + a[0];
|
||||
py = cy + a[1];
|
||||
@ -306,10 +306,10 @@ const runners = {
|
||||
doc.closePath();
|
||||
cx = sx;
|
||||
return (cy = sy);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const solveArc = function(doc, x, y, coords) {
|
||||
const solveArc = function (doc, x, y, coords) {
|
||||
const [rx, ry, rot, large, sweep, ex, ey] = coords;
|
||||
const segs = arcToSegments(ex, ey, rx, ry, large, sweep, rot, x, y);
|
||||
|
||||
@ -320,7 +320,7 @@ const solveArc = function(doc, x, y, coords) {
|
||||
};
|
||||
|
||||
// from Inkscape svgtopdf, thanks!
|
||||
const arcToSegments = function(x, y, rx, ry, large, sweep, rotateX, ox, oy) {
|
||||
const arcToSegments = function (x, y, rx, ry, large, sweep, rotateX, ox, oy) {
|
||||
const th = rotateX * (Math.PI / 180);
|
||||
const sin_th = Math.sin(th);
|
||||
const cos_th = Math.cos(th);
|
||||
@ -379,7 +379,7 @@ const arcToSegments = function(x, y, rx, ry, large, sweep, rotateX, ox, oy) {
|
||||
return result;
|
||||
};
|
||||
|
||||
const segmentToBezier = function(cx, cy, th0, th1, rx, ry, sin_th, cos_th) {
|
||||
const segmentToBezier = function (cx, cy, th0, th1, rx, ry, sin_th, cos_th) {
|
||||
const a00 = cos_th * rx;
|
||||
const a01 = -sin_th * ry;
|
||||
const a10 = sin_th * rx;
|
||||
@ -402,7 +402,7 @@ const segmentToBezier = function(cx, cy, th0, th1, rx, ry, sin_th, cos_th) {
|
||||
a00 * x2 + a01 * y2,
|
||||
a10 * x2 + a11 * y2,
|
||||
a00 * x3 + a01 * y3,
|
||||
a10 * x3 + a11 * y3
|
||||
a10 * x3 + a11 * y3,
|
||||
];
|
||||
};
|
||||
|
||||
|
||||
@ -28,7 +28,7 @@ class PDFTilingPattern {
|
||||
m0 * m21 + m2 * m22,
|
||||
m1 * m21 + m3 * m22,
|
||||
m0 * dx + m2 * dy + m4,
|
||||
m1 * dx + m3 * dy + m5
|
||||
m1 * dx + m3 * dy + m5,
|
||||
];
|
||||
const pattern = this.doc.ref({
|
||||
Type: 'Pattern',
|
||||
@ -38,8 +38,8 @@ class PDFTilingPattern {
|
||||
BBox: this.bBox,
|
||||
XStep: this.xStep,
|
||||
YStep: this.yStep,
|
||||
Matrix: m.map(v => +v.toFixed(5)),
|
||||
Resources: resources
|
||||
Matrix: m.map((v) => +v.toFixed(5)),
|
||||
Resources: resources,
|
||||
});
|
||||
pattern.end(this.stream);
|
||||
return pattern;
|
||||
@ -48,7 +48,7 @@ class PDFTilingPattern {
|
||||
embedPatternColorSpaces() {
|
||||
// map each pattern to an underlying color space
|
||||
// and embed on each page
|
||||
underlyingColorSpaces.forEach(csName => {
|
||||
underlyingColorSpaces.forEach((csName) => {
|
||||
const csId = this.getPatternColorSpaceId(csName);
|
||||
|
||||
if (this.doc.page.colorSpaces[csId]) return;
|
||||
@ -86,14 +86,14 @@ class PDFTilingPattern {
|
||||
|
||||
// select one of the pattern color spaces
|
||||
const csId = this.getPatternColorSpaceId(
|
||||
this.doc._getColorSpace(normalizedColor)
|
||||
this.doc._getColorSpace(normalizedColor),
|
||||
);
|
||||
this.doc._setColorSpace(csId, stroke);
|
||||
|
||||
// stroke/fill using the pattern and color (in the above underlying color space)
|
||||
const op = stroke ? 'SCN' : 'scn';
|
||||
return this.doc.addContent(
|
||||
`${normalizedColor.join(' ')} /${this.id} ${op}`
|
||||
`${normalizedColor.join(' ')} /${this.id} ${op}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -4,7 +4,7 @@ import {
|
||||
isNonASCIISpaceCharacter,
|
||||
isProhibitedCharacter,
|
||||
isBidirectionalRAL,
|
||||
isBidirectionalL
|
||||
isBidirectionalL,
|
||||
} from './lib/code-points';
|
||||
|
||||
// 2.1. Mapping
|
||||
@ -22,9 +22,9 @@ const mapping2space = isNonASCIISpaceCharacter;
|
||||
const mapping2nothing = isCommonlyMappedToNothing;
|
||||
|
||||
// utils
|
||||
const getCodePoint = character => character.codePointAt(0);
|
||||
const first = x => x[0];
|
||||
const last = x => x[x.length - 1];
|
||||
const getCodePoint = (character) => character.codePointAt(0);
|
||||
const first = (x) => x[0];
|
||||
const last = (x) => x[x.length - 1];
|
||||
|
||||
/**
|
||||
* Convert provided string into an array of Unicode Code Points.
|
||||
@ -75,9 +75,9 @@ function saslprep(input, opts = {}) {
|
||||
// 1. Map
|
||||
const mapped_input = toCodePoints(input)
|
||||
// 1.1 mapping to space
|
||||
.map(character => (mapping2space(character) ? 0x20 : character))
|
||||
.map((character) => (mapping2space(character) ? 0x20 : character))
|
||||
// 1.2 mapping to nothing
|
||||
.filter(character => !mapping2nothing(character));
|
||||
.filter((character) => !mapping2nothing(character));
|
||||
|
||||
// 2. Normalize
|
||||
const normalized_input = String.fromCodePoint
|
||||
@ -91,7 +91,7 @@ function saslprep(input, opts = {}) {
|
||||
|
||||
if (hasProhibited) {
|
||||
throw new Error(
|
||||
'Prohibited character, see https://tools.ietf.org/html/rfc4013#section-2.3'
|
||||
'Prohibited character, see https://tools.ietf.org/html/rfc4013#section-2.3',
|
||||
);
|
||||
}
|
||||
|
||||
@ -101,7 +101,7 @@ function saslprep(input, opts = {}) {
|
||||
|
||||
if (hasUnassigned) {
|
||||
throw new Error(
|
||||
'Unassigned code point, see https://tools.ietf.org/html/rfc4013#section-2.5'
|
||||
'Unassigned code point, see https://tools.ietf.org/html/rfc4013#section-2.5',
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -117,7 +117,7 @@ function saslprep(input, opts = {}) {
|
||||
if (hasBidiRAL && hasBidiL) {
|
||||
throw new Error(
|
||||
'String must not contain RandALCat and LCat at the same time,' +
|
||||
' see https://tools.ietf.org/html/rfc3454#section-6'
|
||||
' see https://tools.ietf.org/html/rfc3454#section-6',
|
||||
);
|
||||
}
|
||||
|
||||
@ -128,16 +128,16 @@ function saslprep(input, opts = {}) {
|
||||
*/
|
||||
|
||||
const isFirstBidiRAL = isBidirectionalRAL(
|
||||
getCodePoint(first(normalized_input))
|
||||
getCodePoint(first(normalized_input)),
|
||||
);
|
||||
const isLastBidiRAL = isBidirectionalRAL(
|
||||
getCodePoint(last(normalized_input))
|
||||
getCodePoint(last(normalized_input)),
|
||||
);
|
||||
|
||||
if (hasBidiRAL && !(isFirstBidiRAL && isLastBidiRAL)) {
|
||||
throw new Error(
|
||||
'Bidirectional RandALCat character must be the first and the last' +
|
||||
' character of the string, see https://tools.ietf.org/html/rfc3454#section-6'
|
||||
' character of the string, see https://tools.ietf.org/html/rfc3454#section-6',
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
(File diff suppressed because it is too large)

lib/security.js: 119 changed lines
@ -60,7 +60,7 @@ class PDFSecurity {
|
||||
}
|
||||
|
||||
const encDict = {
|
||||
Filter: 'Standard'
|
||||
Filter: 'Standard',
|
||||
};
|
||||
|
||||
switch (this.version) {
|
||||
@ -106,7 +106,7 @@ class PDFSecurity {
|
||||
r,
|
||||
this.keyBits,
|
||||
paddedUserPassword,
|
||||
paddedOwnerPassword
|
||||
paddedOwnerPassword,
|
||||
);
|
||||
this.encryptionKey = getEncryptionKeyR2R3R4(
|
||||
r,
|
||||
@ -114,7 +114,7 @@ class PDFSecurity {
|
||||
this.document._id,
|
||||
paddedUserPassword,
|
||||
ownerPasswordEntry,
|
||||
permissions
|
||||
permissions,
|
||||
);
|
||||
let userPasswordEntry;
|
||||
if (r === 2) {
|
||||
@ -122,7 +122,7 @@ class PDFSecurity {
|
||||
} else {
|
||||
userPasswordEntry = getUserPasswordR3R4(
|
||||
this.document._id,
|
||||
this.encryptionKey
|
||||
this.encryptionKey,
|
||||
);
|
||||
}
|
||||
|
||||
@ -135,8 +135,8 @@ class PDFSecurity {
|
||||
StdCF: {
|
||||
AuthEvent: 'DocOpen',
|
||||
CFM: 'AESV2',
|
||||
Length: this.keyBits / 8
|
||||
}
|
||||
Length: this.keyBits / 8,
|
||||
},
|
||||
};
|
||||
encDict.StmF = 'StdCF';
|
||||
encDict.StrF = 'StdCF';
|
||||
@ -157,40 +157,40 @@ class PDFSecurity {
|
||||
: processedUserPassword;
|
||||
|
||||
this.encryptionKey = getEncryptionKeyR5(
|
||||
PDFSecurity.generateRandomWordArray
|
||||
PDFSecurity.generateRandomWordArray,
|
||||
);
|
||||
const userPasswordEntry = getUserPasswordR5(
|
||||
processedUserPassword,
|
||||
PDFSecurity.generateRandomWordArray
|
||||
PDFSecurity.generateRandomWordArray,
|
||||
);
|
||||
const userKeySalt = CryptoJS.lib.WordArray.create(
|
||||
userPasswordEntry.words.slice(10, 12),
|
||||
8
|
||||
8,
|
||||
);
|
||||
const userEncryptionKeyEntry = getUserEncryptionKeyR5(
|
||||
processedUserPassword,
|
||||
userKeySalt,
|
||||
this.encryptionKey
|
||||
this.encryptionKey,
|
||||
);
|
||||
const ownerPasswordEntry = getOwnerPasswordR5(
|
||||
processedOwnerPassword,
|
||||
userPasswordEntry,
|
||||
PDFSecurity.generateRandomWordArray
|
||||
PDFSecurity.generateRandomWordArray,
|
||||
);
|
||||
const ownerKeySalt = CryptoJS.lib.WordArray.create(
|
||||
ownerPasswordEntry.words.slice(10, 12),
|
||||
8
|
||||
8,
|
||||
);
|
||||
const ownerEncryptionKeyEntry = getOwnerEncryptionKeyR5(
|
||||
processedOwnerPassword,
|
||||
ownerKeySalt,
|
||||
userPasswordEntry,
|
||||
this.encryptionKey
|
||||
this.encryptionKey,
|
||||
);
|
||||
const permsEntry = getEncryptedPermissionsR5(
|
||||
permissions,
|
||||
this.encryptionKey,
|
||||
PDFSecurity.generateRandomWordArray
|
||||
PDFSecurity.generateRandomWordArray,
|
||||
);
|
||||
|
||||
encDict.V = 5;
|
||||
@ -199,8 +199,8 @@ class PDFSecurity {
|
||||
StdCF: {
|
||||
AuthEvent: 'DocOpen',
|
||||
CFM: 'AESV3',
|
||||
Length: this.keyBits / 8
|
||||
}
|
||||
Length: this.keyBits / 8,
|
||||
},
|
||||
};
|
||||
encDict.StmF = 'StdCF';
|
||||
encDict.StrF = 'StdCF';
|
||||
@ -225,27 +225,27 @@ class PDFSecurity {
|
||||
((obj & 0xff00) << 8) |
|
||||
((obj >> 8) & 0xff00) |
|
||||
(gen & 0xff),
|
||||
(gen & 0xff00) << 16
|
||||
(gen & 0xff00) << 16,
|
||||
],
|
||||
5
|
||||
)
|
||||
5,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (this.version === 1 || this.version === 2) {
|
||||
let key = CryptoJS.MD5(digest);
|
||||
key.sigBytes = Math.min(16, this.keyBits / 8 + 5);
|
||||
return buffer =>
|
||||
return (buffer) =>
|
||||
wordArrayToBuffer(
|
||||
CryptoJS.RC4.encrypt(CryptoJS.lib.WordArray.create(buffer), key)
|
||||
.ciphertext
|
||||
.ciphertext,
|
||||
);
|
||||
}
|
||||
|
||||
let key;
|
||||
if (this.version === 4) {
|
||||
key = CryptoJS.MD5(
|
||||
digest.concat(CryptoJS.lib.WordArray.create([0x73416c54], 4))
|
||||
digest.concat(CryptoJS.lib.WordArray.create([0x73416c54], 4)),
|
||||
);
|
||||
} else {
|
||||
key = this.encryptionKey;
|
||||
@ -255,10 +255,10 @@ class PDFSecurity {
|
||||
const options = {
|
||||
mode: CryptoJS.mode.CBC,
|
||||
padding: CryptoJS.pad.Pkcs7,
|
||||
iv
|
||||
iv,
|
||||
};
|
||||
|
||||
return buffer =>
|
||||
return (buffer) =>
|
||||
wordArrayToBuffer(
|
||||
iv
|
||||
.clone()
|
||||
@ -266,9 +266,9 @@ class PDFSecurity {
|
||||
CryptoJS.AES.encrypt(
|
||||
CryptoJS.lib.WordArray.create(buffer),
|
||||
key,
|
||||
options
|
||||
).ciphertext
|
||||
)
|
||||
options,
|
||||
).ciphertext,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@ -331,7 +331,7 @@ function getUserPasswordR2(encryptionKey) {
|
||||
function getUserPasswordR3R4(documentId, encryptionKey) {
|
||||
const key = encryptionKey.clone();
|
||||
let cipher = CryptoJS.MD5(
|
||||
processPasswordR2R3R4().concat(CryptoJS.lib.WordArray.create(documentId))
|
||||
processPasswordR2R3R4().concat(CryptoJS.lib.WordArray.create(documentId)),
|
||||
);
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const xorRound = Math.ceil(key.sigBytes / 4);
|
||||
@ -348,7 +348,7 @@ function getOwnerPasswordR2R3R4(
|
||||
r,
|
||||
keyBits,
|
||||
paddedUserPassword,
|
||||
paddedOwnerPassword
|
||||
paddedOwnerPassword,
|
||||
) {
|
||||
let digest = paddedOwnerPassword;
|
||||
let round = r >= 3 ? 51 : 1;
|
||||
@ -376,7 +376,7 @@ function getEncryptionKeyR2R3R4(
|
||||
documentId,
|
||||
paddedUserPassword,
|
||||
ownerPasswordEntry,
|
||||
permissions
|
||||
permissions,
|
||||
) {
|
||||
let key = paddedUserPassword
|
||||
.clone()
|
||||
@ -402,15 +402,15 @@ function getUserPasswordR5(processedUserPassword, generateRandomWordArray) {
|
||||
function getUserEncryptionKeyR5(
|
||||
processedUserPassword,
|
||||
userKeySalt,
|
||||
encryptionKey
|
||||
encryptionKey,
|
||||
) {
|
||||
const key = CryptoJS.SHA256(
|
||||
processedUserPassword.clone().concat(userKeySalt)
|
||||
processedUserPassword.clone().concat(userKeySalt),
|
||||
);
|
||||
const options = {
|
||||
mode: CryptoJS.mode.CBC,
|
||||
padding: CryptoJS.pad.NoPadding,
|
||||
iv: CryptoJS.lib.WordArray.create(null, 16)
|
||||
iv: CryptoJS.lib.WordArray.create(null, 16),
|
||||
};
|
||||
return CryptoJS.AES.encrypt(encryptionKey, key, options).ciphertext;
|
||||
}
|
||||
@ -418,7 +418,7 @@ function getUserEncryptionKeyR5(
|
||||
function getOwnerPasswordR5(
|
||||
processedOwnerPassword,
|
||||
userPasswordEntry,
|
||||
generateRandomWordArray
|
||||
generateRandomWordArray,
|
||||
) {
|
||||
const validationSalt = generateRandomWordArray(8);
|
||||
const keySalt = generateRandomWordArray(8);
|
||||
@ -426,7 +426,7 @@ function getOwnerPasswordR5(
|
||||
processedOwnerPassword
|
||||
.clone()
|
||||
.concat(validationSalt)
|
||||
.concat(userPasswordEntry)
|
||||
.concat(userPasswordEntry),
|
||||
)
|
||||
.concat(validationSalt)
|
||||
.concat(keySalt);
|
||||
@ -436,18 +436,18 @@ function getOwnerEncryptionKeyR5(
|
||||
processedOwnerPassword,
|
||||
ownerKeySalt,
|
||||
userPasswordEntry,
|
||||
encryptionKey
|
||||
encryptionKey,
|
||||
) {
|
||||
const key = CryptoJS.SHA256(
|
||||
processedOwnerPassword
|
||||
.clone()
|
||||
.concat(ownerKeySalt)
|
||||
.concat(userPasswordEntry)
|
||||
.concat(userPasswordEntry),
|
||||
);
|
||||
const options = {
|
||||
mode: CryptoJS.mode.CBC,
|
||||
padding: CryptoJS.pad.NoPadding,
|
||||
iv: CryptoJS.lib.WordArray.create(null, 16)
|
||||
iv: CryptoJS.lib.WordArray.create(null, 16),
|
||||
};
|
||||
return CryptoJS.AES.encrypt(encryptionKey, key, options).ciphertext;
|
||||
}
|
||||
@ -459,15 +459,15 @@ function getEncryptionKeyR5(generateRandomWordArray) {
|
||||
function getEncryptedPermissionsR5(
|
||||
permissions,
|
||||
encryptionKey,
|
||||
generateRandomWordArray
|
||||
generateRandomWordArray,
|
||||
) {
|
||||
const cipher = CryptoJS.lib.WordArray.create(
|
||||
[lsbFirstWord(permissions), 0xffffffff, 0x54616462],
|
||||
12
|
||||
12,
|
||||
).concat(generateRandomWordArray(4));
|
||||
const options = {
|
||||
mode: CryptoJS.mode.ECB,
|
||||
padding: CryptoJS.pad.NoPadding
|
||||
padding: CryptoJS.pad.NoPadding,
|
||||
};
|
||||
return CryptoJS.AES.encrypt(cipher, encryptionKey, options).ciphertext;
|
||||
}
|
||||
@@ -516,45 +516,16 @@ function wordArrayToBuffer(wordArray) {
   const byteArray = [];
   for (let i = 0; i < wordArray.sigBytes; i++) {
     byteArray.push(
-      (wordArray.words[Math.floor(i / 4)] >> (8 * (3 - (i % 4)))) & 0xff
+      (wordArray.words[Math.floor(i / 4)] >> (8 * (3 - (i % 4)))) & 0xff,
     );
   }
   return Buffer.from(byteArray);
 }

 const PASSWORD_PADDING = [
-  0x28,
-  0xbf,
-  0x4e,
-  0x5e,
-  0x4e,
-  0x75,
-  0x8a,
-  0x41,
-  0x64,
-  0x00,
-  0x4e,
-  0x56,
-  0xff,
-  0xfa,
-  0x01,
-  0x08,
-  0x2e,
-  0x2e,
-  0x00,
-  0xb6,
-  0xd0,
-  0x68,
-  0x3e,
-  0x80,
-  0x2f,
-  0x0c,
-  0xa9,
-  0xfe,
-  0x64,
-  0x53,
-  0x69,
-  0x7a
+  0x28, 0xbf, 0x4e, 0x5e, 0x4e, 0x75, 0x8a, 0x41, 0x64, 0x00, 0x4e, 0x56, 0xff,
+  0xfa, 0x01, 0x08, 0x2e, 0x2e, 0x00, 0xb6, 0xd0, 0x68, 0x3e, 0x80, 0x2f, 0x0c,
+  0xa9, 0xfe, 0x64, 0x53, 0x69, 0x7a,
 ];

 export default PDFSecurity;

@@ -10,11 +10,11 @@ export default class SpotColor {
       {
         Range: [0, 1, 0, 1, 0, 1, 0, 1],
         C0: [0, 0, 0, 0],
-        C1: this.values.map(value => value / 100),
+        C1: this.values.map((value) => value / 100),
         FunctionType: 2,
         Domain: [0, 1],
-        N: 1
-      }
+        N: 1,
+      },
     ]);
     this.ref.end();
   }
@ -3,7 +3,7 @@ PDFStructureElement - represents an element in the PDF logical structure tree
By Ben Schmidt
*/

import PDFStructureContent from "./structure_content";
import PDFStructureContent from './structure_content';

class PDFStructureElement {
constructor(document, type, options = {}, children = null) {
@ -14,7 +14,7 @@ class PDFStructureElement {
this._flushed = false;
this.dictionary = document.ref({
// Type: "StructElem",
S: type
S: type,
});

const data = this.dictionary.data;
@ -83,7 +83,8 @@ class PDFStructureElement {

_addContentToParentTree(content) {
content.refs.forEach(({ pageRef, mcid }) => {
const pageStructParents = this.document.getStructParentTree()
const pageStructParents = this.document
.getStructParentTree()
.get(pageRef.data.StructParents);
pageStructParents[mcid] = this.dictionary;
});
@ -133,9 +134,11 @@ class PDFStructureElement {
}

_isValidChild(child) {
return child instanceof PDFStructureElement ||
child instanceof PDFStructureContent ||
typeof child === 'function';
return (
child instanceof PDFStructureElement ||
child instanceof PDFStructureContent ||
typeof child === 'function'
);
}

_contentForClosure(closure) {
@ -199,9 +202,9 @@ class PDFStructureElement {
this.dictionary.data.K.push(mcid);
} else {
this.dictionary.data.K.push({
Type: "MCR",
Type: 'MCR',
Pg: pageRef,
MCID: mcid
MCID: mcid,
});
}
});

@ -144,5 +144,7 @@ export function normalizedColumnStyle(defaultColStyle, colStyleInternal, i) {
}

export function normalizeAlignment(align) {
return align == null || typeof align === 'string' ? { x: align, y: align } : align;
return align == null || typeof align === 'string'
? { x: align, y: align }
: align;
}

@ -327,7 +327,7 @@ export function memoize(fn, maxSize) {
* @returns {boolean}
*/
function isObject(item) {
return item && typeof item === "object" && !Array.isArray(item);
return item && typeof item === 'object' && !Array.isArray(item);
}

/**

lib/tree.js
@ -8,8 +8,7 @@ class PDFTree {
constructor(options = {}) {
this._items = {};
// disable /Limits output for this tree
this.limits =
typeof options.limits === 'boolean' ? options.limits : true;
this.limits = typeof options.limits === 'boolean' ? options.limits : true;
}

add(key, val) {
@ -23,7 +22,7 @@ class PDFTree {
toString() {
// Needs to be sorted by key
const sortedKeys = Object.keys(this._items).sort((a, b) =>
this._compareKeys(a, b)
this._compareKeys(a, b),
);

const out = ['<<'];
@ -31,15 +30,15 @@ class PDFTree {
const first = sortedKeys[0],
last = sortedKeys[sortedKeys.length - 1];
out.push(
` /Limits ${PDFObject.convert([this._dataForKey(first), this._dataForKey(last)])}`
` /Limits ${PDFObject.convert([this._dataForKey(first), this._dataForKey(last)])}`,
);
}
out.push(` /${this._keysName()} [`);
for (let key of sortedKeys) {
out.push(
` ${PDFObject.convert(this._dataForKey(key))} ${PDFObject.convert(
this._items[key]
)}`
this._items[key],
)}`,
);
}
out.push(']');

@ -10,7 +10,7 @@ class VirtualFileSystem {
const data = this.fileData[virtualFileName];
if (data == null) {
throw new Error(
`File '${virtualFileName}' not found in virtual file system`
`File '${virtualFileName}' not found in virtual file system`,
);
}

@ -13,7 +13,7 @@ describe('acroform', () => {

beforeEach(() => {
doc = new PDFDocument({
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) }
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
});
});

@ -42,9 +42,9 @@ describe('acroform', () => {
'>>',
']',
'>>',
'>>'
'>>',
),
'endobj'
'endobj',
];
const docData = logData(doc);
doc.addNamedJavaScript('name1', 'my javascript goes here');
@ -92,9 +92,9 @@ describe('acroform', () => {
|
||||
'[10 292 602 692]',
|
||||
'/Border [0 0 0]',
|
||||
'/C [0 0 0]',
|
||||
'>>'
|
||||
'>>',
|
||||
),
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
|
||||
const docData = logData(doc);
|
||||
@ -124,13 +124,13 @@ describe('acroform', () => {
|
||||
const expected = [
|
||||
'10 0 obj',
|
||||
'<<\n/FT /Btn\n/Ff 65536\n/MK <<\n/CA (Test Button)\n/BG [1 1 0]\n>>\n/T (btn1)\n/Subtype /Widget\n/F 4\n/Type /Annot\n/Rect [20 742 120 772]\n/Border [0 0 0]\n/C [0 0 0]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
doc.initForm();
|
||||
const docData = logData(doc);
|
||||
let opts = {
|
||||
backgroundColor: 'yellow',
|
||||
label: 'Test Button'
|
||||
label: 'Test Button',
|
||||
};
|
||||
doc.formPushButton('btn1', 20, 20, 100, 30, opts);
|
||||
expect(docData.length).toBe(3);
|
||||
@ -147,7 +147,7 @@ describe('acroform', () => {
|
||||
'/JS (AFNumber_Keystroke\\(2,1,"MinusBlack",null,"$",true\\);)\n>>\n' +
|
||||
'/F <<\n/S /JavaScript\n/JS (AFNumber_Format\\(2,1,"MinusBlack",null,"$",true\\);)\n>>\n>>\n' +
|
||||
'/T (dollars)\n/Subtype /Widget\n/F 4\n/Type /Annot\n/Rect [20 752 70 772]\n/Border [0 0 0]\n/C [0 0 0]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
doc.initForm();
|
||||
const docData = logData(doc);
|
||||
@ -157,8 +157,8 @@ describe('acroform', () => {
|
||||
type: 'number',
|
||||
nDec: 2,
|
||||
currency: '$',
|
||||
currencyPrepend: true
|
||||
}
|
||||
currencyPrepend: true,
|
||||
},
|
||||
};
|
||||
doc.formText('dollars', 20, 20, 50, 20, opts);
|
||||
expect(docData.length).toBe(3);
|
||||
@ -171,7 +171,7 @@ describe('acroform', () => {
|
||||
'/JS (AFDate_KeystrokeEx\\(yyyy-mm-dd\\);)\n>>\n' +
|
||||
'/F <<\n/S /JavaScript\n/JS (AFDate_Format\\(yyyy-mm-dd\\);)\n>>\n>>\n' +
|
||||
'/T (date)\n/Subtype /Widget\n/F 4\n/Type /Annot\n/Rect [20 752 70 772]\n/Border [0 0 0]\n/C [0 0 0]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
doc.initForm();
|
||||
const docData = logData(doc);
|
||||
@ -179,8 +179,8 @@ describe('acroform', () => {
|
||||
value: '1999-12-31',
|
||||
format: {
|
||||
type: 'date',
|
||||
param: 'yyyy-mm-dd'
|
||||
}
|
||||
param: 'yyyy-mm-dd',
|
||||
},
|
||||
};
|
||||
doc.formText('date', 20, 20, 50, 20, opts);
|
||||
expect(docData.length).toBe(3);
|
||||
@ -194,7 +194,7 @@ describe('acroform', () => {
|
||||
'<<\n/FT /Tx\n' +
|
||||
'/Ff 4206599\n/Q 1\n' +
|
||||
'/T (flags)\n/Subtype /Widget\n/F 4\n/Type /Annot\n/Rect [20 752 70 772]\n/Border [0 0 0]\n/C [0 0 0]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
doc.initForm();
|
||||
const docData = logData(doc);
|
||||
@ -205,7 +205,7 @@ describe('acroform', () => {
|
||||
align: 'center',
|
||||
multiline: true,
|
||||
password: true,
|
||||
noSpell: true
|
||||
noSpell: true,
|
||||
};
|
||||
doc.formText('flags', 20, 20, 50, 20, opts);
|
||||
expect(docData.length).toBe(3);
|
||||
@ -214,12 +214,12 @@ describe('acroform', () => {
|
||||
|
||||
test('false flags should be ignored', () => {
|
||||
const expectedDoc = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) }
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
});
|
||||
expectedDoc.initForm();
|
||||
const expectedDocData = logData(expectedDoc);
|
||||
let emptyOpts = {
|
||||
align: 'center'
|
||||
align: 'center',
|
||||
};
|
||||
expectedDoc.formText('flags', 20, 20, 50, 20, emptyOpts);
|
||||
|
||||
@ -232,7 +232,7 @@ describe('acroform', () => {
|
||||
align: 'center',
|
||||
multiline: false,
|
||||
password: false,
|
||||
noSpell: false
|
||||
noSpell: false,
|
||||
};
|
||||
doc.formText('flags', 20, 20, 50, 20, opts);
|
||||
|
||||
@ -260,13 +260,13 @@ describe('acroform', () => {
|
||||
'/C [0 0 0]\n' +
|
||||
'/FontSize 16\n' +
|
||||
'>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
doc.registerFont('myfont1', 'tests/fonts/Roboto-Regular.ttf');
|
||||
doc.initForm();
|
||||
const docData = logData(doc);
|
||||
let opts = {
|
||||
fontSize: 16
|
||||
fontSize: 16,
|
||||
};
|
||||
doc.font('myfont1').formText('text', 20, 20, 50, 20, opts);
|
||||
expect(docData.length).toBe(3);
|
||||
@ -283,7 +283,7 @@ describe('acroform', () => {
|
||||
'endobj',
|
||||
'15 0 obj',
|
||||
'<<\n/Parent 12 0 R\n/FT /Tx\n/T (leaf3)\n/Subtype /Widget\n/F 4\n/Type /Annot\n/Rect [10 642 210 682]\n/Border [0 0 0]\n/C [0 0 0]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
const expected2 = [
|
||||
'11 0 obj',
|
||||
@ -297,7 +297,7 @@ describe('acroform', () => {
|
||||
'endobj',
|
||||
'9 0 obj',
|
||||
'<<\n/Fields [10 0 R]\n/NeedAppearances true\n/DA (/F1 0 Tf 0 g)\n/DR <<\n/Font <<\n/F1 8 0 R\n>>\n>>\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
];
|
||||
|
||||
const docData = logData(doc);
|
||||
|
||||
@ -12,7 +12,7 @@ describe('Annotations', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) }
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
});
|
||||
});
|
||||
|
||||
@ -29,7 +29,7 @@ describe('Annotations', () => {
|
||||
`<<
|
||||
/S /GoTo
|
||||
/D [7 0 R /XYZ null null null]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -45,7 +45,7 @@ describe('Annotations', () => {
|
||||
`<<
|
||||
/S /URI
|
||||
/URI (http://www.example.com)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -56,7 +56,7 @@ describe('Annotations', () => {
|
||||
|
||||
document.text('Go to url', {
|
||||
link: 'http://www.example.com',
|
||||
continued: true
|
||||
continued: true,
|
||||
});
|
||||
document.text('continued link');
|
||||
|
||||
@ -65,7 +65,7 @@ describe('Annotations', () => {
|
||||
`<<
|
||||
/S /URI
|
||||
/URI (http://www.example.com)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -73,7 +73,7 @@ describe('Annotations', () => {
|
||||
`<<
|
||||
/S /URI
|
||||
/URI (http://www.example.com)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -84,7 +84,7 @@ describe('Annotations', () => {
|
||||
|
||||
document.text('Go to url', {
|
||||
link: 'http://www.example.com',
|
||||
continued: true
|
||||
continued: true,
|
||||
});
|
||||
document.text('no continued link', { link: null });
|
||||
|
||||
@ -94,7 +94,7 @@ describe('Annotations', () => {
|
||||
`<<
|
||||
/S /URI
|
||||
/URI (http://www.example.com)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).not.toContainChunk([`14 0 obj`]);
|
||||
@ -107,7 +107,7 @@ describe('Annotations', () => {
|
||||
|
||||
document.fileAnnotation(100, 100, 20, 20, {
|
||||
src: Buffer.from('example text'),
|
||||
name: 'file.txt'
|
||||
name: 'file.txt',
|
||||
});
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -119,7 +119,7 @@ describe('Annotations', () => {
|
||||
/Rect [100 672 120 692]
|
||||
/Border [0 0 0]
|
||||
/C [0 0 0]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -129,7 +129,7 @@ describe('Annotations', () => {
|
||||
document.fileAnnotation(100, 100, 20, 20, {
|
||||
src: Buffer.from('example text'),
|
||||
name: 'file.txt',
|
||||
description: 'file description'
|
||||
description: 'file description',
|
||||
});
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -142,7 +142,7 @@ describe('Annotations', () => {
|
||||
/Rect [100 672 120 692]
|
||||
/Border [0 0 0]
|
||||
/C [0 0 0]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -157,11 +157,11 @@ describe('Annotations', () => {
|
||||
{
|
||||
src: Buffer.from('example text'),
|
||||
name: 'file.txt',
|
||||
description: 'file description'
|
||||
description: 'file description',
|
||||
},
|
||||
{
|
||||
Contents: 'other description'
|
||||
}
|
||||
Contents: 'other description',
|
||||
},
|
||||
);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -174,7 +174,7 @@ describe('Annotations', () => {
|
||||
/Rect [100 672 120 692]
|
||||
/Border [0 0 0]
|
||||
/C [0 0 0]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -15,7 +15,7 @@ describe('file', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: date }
|
||||
info: { CreationDate: date },
|
||||
});
|
||||
});
|
||||
|
||||
@ -26,13 +26,11 @@ describe('file', () => {
|
||||
name: 'file.txt',
|
||||
type: 'text/plain',
|
||||
creationDate: date,
|
||||
modifiedDate: date
|
||||
modifiedDate: date,
|
||||
});
|
||||
document.end();
|
||||
|
||||
const md5 = createHash('md5')
|
||||
.update('example text')
|
||||
.digest('hex');
|
||||
const md5 = createHash('md5').update('example text').digest('hex');
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
@ -47,7 +45,7 @@ describe('file', () => {
|
||||
/Subtype /text#2Fplain
|
||||
/Length 20
|
||||
/Filter /FlateDecode
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -60,7 +58,7 @@ describe('file', () => {
|
||||
/F 8 0 R
|
||||
>>
|
||||
/UF (file.txt)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -75,7 +73,7 @@ describe('file', () => {
|
||||
(file.txt) 9 0 R
|
||||
]
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -86,13 +84,11 @@ describe('file', () => {
|
||||
name: 'file.txt',
|
||||
creationDate: date,
|
||||
modifiedDate: date,
|
||||
description: 'file description'
|
||||
description: 'file description',
|
||||
});
|
||||
document.end();
|
||||
|
||||
const md5 = createHash('md5')
|
||||
.update('example text')
|
||||
.digest('hex');
|
||||
const md5 = createHash('md5').update('example text').digest('hex');
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
@ -106,7 +102,7 @@ describe('file', () => {
|
||||
>>
|
||||
/Length 20
|
||||
/Filter /FlateDecode
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -120,7 +116,7 @@ describe('file', () => {
|
||||
>>
|
||||
/UF (file.txt)
|
||||
/Desc (file description)
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -131,13 +127,11 @@ describe('file', () => {
|
||||
name: 'file.txt',
|
||||
creationDate: date,
|
||||
modifiedDate: date,
|
||||
hidden: true
|
||||
hidden: true,
|
||||
});
|
||||
document.end();
|
||||
|
||||
const md5 = createHash('md5')
|
||||
.update('example text')
|
||||
.digest('hex');
|
||||
const md5 = createHash('md5').update('example text').digest('hex');
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
@ -151,7 +145,7 @@ describe('file', () => {
|
||||
>>
|
||||
/Length 20
|
||||
/Filter /FlateDecode
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
// hidden: do not add to /EmbeddedFiles
|
||||
@ -162,7 +156,7 @@ describe('file', () => {
|
||||
/Names [
|
||||
]
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -172,12 +166,12 @@ describe('file', () => {
|
||||
document.file(Buffer.from('example text'), {
|
||||
name: 'file1.txt',
|
||||
creationDate: date,
|
||||
modifiedDate: date
|
||||
modifiedDate: date,
|
||||
});
|
||||
document.file(Buffer.from('example text'), {
|
||||
name: 'file2.txt',
|
||||
creationDate: date,
|
||||
modifiedDate: date
|
||||
modifiedDate: date,
|
||||
});
|
||||
document.end();
|
||||
|
||||
@ -194,7 +188,7 @@ describe('file', () => {
|
||||
(file2.txt) 11 0 R
|
||||
]
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -204,18 +198,21 @@ describe('file', () => {
|
||||
document.file(Buffer.from('example text'), {
|
||||
name: 'file1.txt',
|
||||
creationDate: date,
|
||||
modifiedDate: date
|
||||
modifiedDate: date,
|
||||
});
|
||||
document.file(Buffer.from('example text'), {
|
||||
name: 'file1.txt',
|
||||
creationDate: new Date(date),
|
||||
modifiedDate: new Date(date)
|
||||
modifiedDate: new Date(date),
|
||||
});
|
||||
document.end();
|
||||
|
||||
const numFiles = docData.filter((str) => typeof str === 'string' && str.startsWith('<<\n/Type /EmbeddedFile\n'))
|
||||
const numFiles = docData.filter(
|
||||
(str) =>
|
||||
typeof str === 'string' && str.startsWith('<<\n/Type /EmbeddedFile\n'),
|
||||
);
|
||||
|
||||
expect(numFiles.length).toEqual(1)
|
||||
expect(numFiles.length).toEqual(1);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`2 0 obj`,
|
||||
@ -229,7 +226,7 @@ describe('file', () => {
|
||||
(file1.txt) 10 0 R
|
||||
]
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
import PDFDocument from '../../lib/document';
|
||||
import { logData } from './helpers';
|
||||
|
||||
describe('color', function() {
|
||||
test('normalize', function() {
|
||||
describe('color', function () {
|
||||
test('normalize', function () {
|
||||
const doc = new PDFDocument();
|
||||
|
||||
expect(doc._normalizeColor('#FFF')).toEqual([1, 1, 1]);
|
||||
@ -10,34 +10,24 @@ describe('color', function() {
|
||||
expect(doc._normalizeColor('#000')).toEqual([0, 0, 0]);
|
||||
expect(doc._normalizeColor('#000000')).toEqual([0, 0, 0]);
|
||||
expect(doc._normalizeColor('#6F6FEF')).toEqual([
|
||||
0.43529411764705883,
|
||||
0.43529411764705883,
|
||||
0.9372549019607843
|
||||
0.43529411764705883, 0.43529411764705883, 0.9372549019607843,
|
||||
]);
|
||||
|
||||
expect(doc._normalizeColor([255, 255, 255])).toEqual([1, 1, 1]);
|
||||
expect(doc._normalizeColor([255, 255, 255, 255])).toEqual([
|
||||
2.55,
|
||||
2.55,
|
||||
2.55,
|
||||
2.55
|
||||
2.55, 2.55, 2.55, 2.55,
|
||||
]);
|
||||
expect(doc._normalizeColor([0, 0, 0])).toEqual([0, 0, 0]);
|
||||
expect(doc._normalizeColor([0, 0, 0, 0])).toEqual([0, 0, 0, 0]);
|
||||
expect(doc._normalizeColor([128, 10, 18])).toEqual([
|
||||
0.5019607843137255,
|
||||
0.0392156862745098,
|
||||
0.07058823529411765
|
||||
0.5019607843137255, 0.0392156862745098, 0.07058823529411765,
|
||||
]);
|
||||
expect(doc._normalizeColor([128, 10, 18, 100])).toEqual([
|
||||
1.28,
|
||||
0.1,
|
||||
0.18,
|
||||
1
|
||||
1.28, 0.1, 0.18, 1,
|
||||
]);
|
||||
});
|
||||
|
||||
test('normalize with spot color', function() {
|
||||
test('normalize with spot color', function () {
|
||||
const doc = new PDFDocument();
|
||||
doc.addSpotColor('PANTONE 123 C', 0.1, 0.2, 0.3, 0.4);
|
||||
|
||||
@ -46,25 +36,24 @@ describe('color', function() {
|
||||
expect(color.values).toEqual([0.1, 0.2, 0.3, 0.4]);
|
||||
});
|
||||
|
||||
test('spot color', function() {
|
||||
const doc = new PDFDocument();
|
||||
const data = logData(doc);
|
||||
doc.addSpotColor('PANTONE185C', 0, 100, 78, 9)
|
||||
doc.fillColor('PANTONE185C')
|
||||
.text('This text uses spot color!');
|
||||
doc.end();
|
||||
test('spot color', function () {
|
||||
const doc = new PDFDocument();
|
||||
const data = logData(doc);
|
||||
doc.addSpotColor('PANTONE185C', 0, 100, 78, 9);
|
||||
doc.fillColor('PANTONE185C').text('This text uses spot color!');
|
||||
doc.end();
|
||||
|
||||
expect(data).toContainChunk([
|
||||
`6 0 obj`,
|
||||
'<<\n' +
|
||||
'/ProcSet [/PDF /Text /ImageB /ImageC /ImageI]\n' +
|
||||
'/ColorSpace <<\n' +
|
||||
'/CS0 8 0 R\n' +
|
||||
'>>\n' +
|
||||
'/Font <<\n' +
|
||||
'/F1 9 0 R\n' +
|
||||
'>>\n' +
|
||||
'>>',
|
||||
]);
|
||||
expect(data).toContainChunk([
|
||||
`6 0 obj`,
|
||||
'<<\n' +
|
||||
'/ProcSet [/PDF /Text /ImageB /ImageC /ImageI]\n' +
|
||||
'/ColorSpace <<\n' +
|
||||
'/CS0 8 0 R\n' +
|
||||
'>>\n' +
|
||||
'/Font <<\n' +
|
||||
'/F1 9 0 R\n' +
|
||||
'>>\n' +
|
||||
'>>',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -37,35 +37,34 @@ describe('PDFDocument', () => {
|
||||
describe('document info', () => {
|
||||
test('accepts properties with value undefined', () => {
|
||||
expect(() => new PDFDocument({ info: { Title: undefined } })).not.toThrow(
|
||||
new TypeError("Cannot read property 'toString' of undefined")
|
||||
new TypeError("Cannot read property 'toString' of undefined"),
|
||||
);
|
||||
});
|
||||
|
||||
test('accepts properties with value null', () => {
|
||||
expect(() => new PDFDocument({ info: { Title: null } })).not.toThrow(
|
||||
new TypeError("Cannot read property 'toString' of null")
|
||||
new TypeError("Cannot read property 'toString' of null"),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('metadata is present for PDF 1.4', () => {
|
||||
let doc = new PDFDocument({pdfVersion: '1.4'});
|
||||
let doc = new PDFDocument({ pdfVersion: '1.4' });
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
|
||||
let catalog = data[data.length-28];
|
||||
let catalog = data[data.length - 28];
|
||||
|
||||
expect(catalog).toContain('/Metadata');
|
||||
});
|
||||
|
||||
test('metadata is NOT present for PDF 1.3', () => {
|
||||
let doc = new PDFDocument({pdfVersion: '1.3'});
|
||||
let doc = new PDFDocument({ pdfVersion: '1.3' });
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
|
||||
let catalog = data[data.length-27];
|
||||
let catalog = data[data.length - 27];
|
||||
|
||||
expect(catalog).not.toContain('/Metadata');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@ -7,7 +7,7 @@ describe('EmbeddedFont', () => {
|
||||
const document = new PDFDocument();
|
||||
const font = PDFFontFactory.open(
|
||||
document,
|
||||
'tests/fonts/Roboto-Regular.ttf'
|
||||
'tests/fonts/Roboto-Regular.ttf',
|
||||
);
|
||||
const runSpy = jest.spyOn(font, 'layoutRun');
|
||||
|
||||
@ -23,7 +23,7 @@ describe('EmbeddedFont', () => {
|
||||
const document = new PDFDocument({ fontLayoutCache: false });
|
||||
const font = PDFFontFactory.open(
|
||||
document,
|
||||
'tests/fonts/Roboto-Regular.ttf'
|
||||
'tests/fonts/Roboto-Regular.ttf',
|
||||
);
|
||||
const runSpy = jest.spyOn(font, 'layoutRun');
|
||||
|
||||
@ -42,7 +42,7 @@ describe('EmbeddedFont', () => {
|
||||
document,
|
||||
'tests/fonts/Roboto-Regular.ttf',
|
||||
undefined,
|
||||
'F1099'
|
||||
'F1099',
|
||||
);
|
||||
const dictionary = {
|
||||
end: () => {},
|
||||
@ -61,7 +61,7 @@ describe('EmbeddedFont', () => {
|
||||
doc,
|
||||
'tests/fonts/Roboto-Regular.ttf',
|
||||
undefined,
|
||||
'F1099'
|
||||
'F1099',
|
||||
);
|
||||
|
||||
// 398 different glyphs
|
||||
@ -80,15 +80,19 @@ describe('EmbeddedFont', () => {
|
||||
|
||||
const docData = logData(doc);
|
||||
font.toUnicodeCmap();
|
||||
const text = docData.map((d) => d.toString("utf8")).join("");
|
||||
const text = docData.map((d) => d.toString('utf8')).join('');
|
||||
|
||||
let glyphs = 0
|
||||
for (const block of text.matchAll(/beginbfrange\n((?:.|\n)*?)\nendbfrange/g)) {
|
||||
for (const line of block[1].matchAll(/^<([0-9a-f]+)>\s+<([0-9a-f]+)>\s+\[/igm)) {
|
||||
let glyphs = 0;
|
||||
for (const block of text.matchAll(
|
||||
/beginbfrange\n((?:.|\n)*?)\nendbfrange/g,
|
||||
)) {
|
||||
for (const line of block[1].matchAll(
|
||||
/^<([0-9a-f]+)>\s+<([0-9a-f]+)>\s+\[/gim,
|
||||
)) {
|
||||
const low = parseInt(line[1], 16);
|
||||
const high = parseInt(line[2], 16);
|
||||
glyphs += high - low + 1;
|
||||
expect(high & 0xFFFFFF00).toBe(low & 0xFFFFFF00);
|
||||
expect(high & 0xffffff00).toBe(low & 0xffffff00);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,22 +1,19 @@
|
||||
import PDFDocument from '../../lib/document';
|
||||
import { logData } from './helpers';
|
||||
|
||||
describe('Gradient', function() {
|
||||
describe('Gradient', function () {
|
||||
let document;
|
||||
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) }
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
});
|
||||
});
|
||||
|
||||
test('Multiple stops', () => {
|
||||
const docData = logData(document);
|
||||
const gradient = document.linearGradient(0, 0, 300, 0);
|
||||
gradient
|
||||
.stop(0, 'green')
|
||||
.stop(0.5, 'red')
|
||||
.stop(1, 'green');
|
||||
gradient.stop(0, 'green').stop(0.5, 'red').stop(1, 'green');
|
||||
document.rect(0, 0, 300, 300).fill(gradient);
|
||||
document.end();
|
||||
|
||||
@ -28,7 +25,7 @@ describe('Gradient', function() {
|
||||
/C0 [0 0.501961 0]
|
||||
/C1 [1 0 0]
|
||||
/N 1
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'9 0 obj',
|
||||
@ -38,7 +35,7 @@ describe('Gradient', function() {
|
||||
/C0 [1 0 0]
|
||||
/C1 [0 0.501961 0]
|
||||
/N 1
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -49,7 +46,7 @@ describe('Gradient', function() {
|
||||
/Functions [8 0 R 9 0 R]
|
||||
/Bounds [0.5]
|
||||
/Encode [0 1 0 1]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -60,7 +57,7 @@ describe('Gradient', function() {
|
||||
/Coords [0 0 300 0]
|
||||
/Function 10 0 R
|
||||
/Extend [true true]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -70,7 +67,7 @@ describe('Gradient', function() {
|
||||
/PatternType 2
|
||||
/Shading 11 0 R
|
||||
/Matrix [1 0 0 -1 0 792]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
function logData(doc) {
|
||||
const loggedData = [];
|
||||
const originalMethod = doc._write;
|
||||
doc._write = function(data) {
|
||||
doc._write = function (data) {
|
||||
loggedData.push(data);
|
||||
originalMethod.call(this, data);
|
||||
};
|
||||
@ -13,11 +13,9 @@ function escapeRegExp(string) {
|
||||
}
|
||||
|
||||
function joinTokens(...args) {
|
||||
let a = args.map(i => escapeRegExp(i));
|
||||
let a = args.map((i) => escapeRegExp(i));
|
||||
let r = new RegExp('^' + a.join('\\s*') + '$');
|
||||
return r;
|
||||
}
|
||||
|
||||
export { logData, joinTokens }
|
||||
|
||||
|
||||
export { logData, joinTokens };
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import PDFDocument from "../../lib/document";
|
||||
import PDFDocument from '../../lib/document';
|
||||
import LineWrapper from '../../lib/line_wrapper';
|
||||
|
||||
describe("LineWrapper", () => {
|
||||
describe('LineWrapper', () => {
|
||||
let document;
|
||||
|
||||
beforeEach(() => {
|
||||
@ -11,52 +11,63 @@ describe("LineWrapper", () => {
|
||||
});
|
||||
});
|
||||
|
||||
test("ellipsis is present only on last line of multiline text", () => {
|
||||
test('ellipsis is present only on last line of multiline text', () => {
|
||||
// There is a weird edge case where ellipsis occurs on lines
|
||||
// in the middle of text due to number rounding errors
|
||||
//
|
||||
// There is probably a simpler combination of values but this is one I found in the wild
|
||||
document.y = 402.1999999999999;
|
||||
document.fontSize(7.26643598615917)
|
||||
const wrapper = new LineWrapper(document, {width: 300, height: 50.399999999999864, ellipsis: true})
|
||||
let wrapperOutput = "";
|
||||
wrapper.on("line", (buffer) => {
|
||||
document.fontSize(7.26643598615917);
|
||||
const wrapper = new LineWrapper(document, {
|
||||
width: 300,
|
||||
height: 50.399999999999864,
|
||||
ellipsis: true,
|
||||
});
|
||||
let wrapperOutput = '';
|
||||
wrapper.on('line', (buffer) => {
|
||||
wrapperOutput += buffer;
|
||||
document.y += document.currentLineHeight(true)
|
||||
})
|
||||
wrapper.wrap("- A\n- B\n- C\n- D\n- E\n- F", {})
|
||||
expect(wrapperOutput).toBe("- A\n- B\n- C\n- D\n- E\n- F");
|
||||
})
|
||||
|
||||
test("line break is handled correctly when at weird heights", () => {
|
||||
// There is probably a simpler combination of values but this is one I found in the wild
|
||||
document.y = 1/3;
|
||||
document.fontSize(Math.fround(42.3/3));
|
||||
let lineHeight = document.currentLineHeight(true);
|
||||
const wrapper = new LineWrapper(document, {width: 300, height:lineHeight*3})
|
||||
let wrapperOutput = "";
|
||||
wrapper.on("line", (buffer) => {
|
||||
wrapperOutput += buffer;
|
||||
document.y += lineHeight
|
||||
})
|
||||
// Limit to 3/4 lines
|
||||
wrapper.wrap("A\nB\nC\nD", {})
|
||||
expect(wrapperOutput).toBe("A\nB\nC\n");
|
||||
document.y += document.currentLineHeight(true);
|
||||
});
|
||||
wrapper.wrap('- A\n- B\n- C\n- D\n- E\n- F', {});
|
||||
expect(wrapperOutput).toBe('- A\n- B\n- C\n- D\n- E\n- F');
|
||||
});
|
||||
|
||||
test("line break is handled correctly with ellipsis", () => {
|
||||
test('line break is handled correctly when at weird heights', () => {
|
||||
// There is probably a simpler combination of values but this is one I found in the wild
|
||||
document.y = 1/3;
|
||||
document.fontSize(Math.fround(42.3/3));
|
||||
document.y = 1 / 3;
|
||||
document.fontSize(Math.fround(42.3 / 3));
|
||||
let lineHeight = document.currentLineHeight(true);
|
||||
const wrapper = new LineWrapper(document, {width: 300, height:lineHeight*3, ellipsis: true})
|
||||
let wrapperOutput = "";
|
||||
wrapper.on("line", (buffer) => {
|
||||
const wrapper = new LineWrapper(document, {
|
||||
width: 300,
|
||||
height: lineHeight * 3,
|
||||
});
|
||||
let wrapperOutput = '';
|
||||
wrapper.on('line', (buffer) => {
|
||||
wrapperOutput += buffer;
|
||||
document.y += lineHeight
|
||||
})
|
||||
document.y += lineHeight;
|
||||
});
|
||||
// Limit to 3/4 lines
|
||||
wrapper.wrap("A\nB\nC\nD", {})
|
||||
expect(wrapperOutput).toBe("A\nB\nC…");
|
||||
wrapper.wrap('A\nB\nC\nD', {});
|
||||
expect(wrapperOutput).toBe('A\nB\nC\n');
|
||||
});
|
||||
|
||||
test('line break is handled correctly with ellipsis', () => {
|
||||
// There is probably a simpler combination of values but this is one I found in the wild
|
||||
document.y = 1 / 3;
|
||||
document.fontSize(Math.fround(42.3 / 3));
|
||||
let lineHeight = document.currentLineHeight(true);
|
||||
const wrapper = new LineWrapper(document, {
|
||||
width: 300,
|
||||
height: lineHeight * 3,
|
||||
ellipsis: true,
|
||||
});
|
||||
let wrapperOutput = '';
|
||||
wrapper.on('line', (buffer) => {
|
||||
wrapperOutput += buffer;
|
||||
document.y += lineHeight;
|
||||
});
|
||||
// Limit to 3/4 lines
|
||||
wrapper.wrap('A\nB\nC\nD', {});
|
||||
expect(wrapperOutput).toBe('A\nB\nC…');
|
||||
});
|
||||
});
|
||||
|
||||
@ -7,7 +7,7 @@ describe('Markings', () => {
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
compress: false
|
||||
compress: false,
|
||||
});
|
||||
});
|
||||
|
||||
@ -20,10 +20,10 @@ describe('Markings', () => {
|
||||
/Span BMC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.markContent("Span");
|
||||
document.markContent('Span');
|
||||
document.endMarkedContent();
|
||||
document.end();
|
||||
|
||||
@ -35,7 +35,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -53,12 +53,12 @@ EMC
|
||||
>> BDC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
const structureContent1 = document.markStructureContent("Span");
|
||||
const structureContent1 = document.markStructureContent('Span');
|
||||
document.endMarkedContent();
|
||||
const structureContent2 = document.markStructureContent("Span");
|
||||
const structureContent2 = document.markStructureContent('Span');
|
||||
document.endMarkedContent();
|
||||
document.end();
|
||||
|
||||
@ -76,7 +76,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -94,11 +94,11 @@ EMC
|
||||
>> BDC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.addStructure(document.struct('Span', () => {}));
|
||||
document.addStructure(document.struct('Span', () => {}));
|
||||
document.addStructure(document.struct('Span', () => {}));
|
||||
document.addStructure(document.struct('Span', () => {}));
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -109,7 +109,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -132,20 +132,20 @@ EMC
|
||||
>> BDC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.markContent("Artifact", {
|
||||
type: "Pagination",
|
||||
document.markContent('Artifact', {
|
||||
type: 'Pagination',
|
||||
bbox: [40, 50, 570, 70],
|
||||
attached: [ "Top" ]
|
||||
attached: ['Top'],
|
||||
});
|
||||
document.endMarkedContent();
|
||||
document.markContent("Span", {
|
||||
lang: "en-AU",
|
||||
alt: "Hi, earth! ",
|
||||
actual: "Hello, world! ",
|
||||
expanded: "Greetings, terrestrial sphere! "
|
||||
document.markContent('Span', {
|
||||
lang: 'en-AU',
|
||||
alt: 'Hi, earth! ',
|
||||
actual: 'Hello, world! ',
|
||||
expanded: 'Greetings, terrestrial sphere! ',
|
||||
});
|
||||
document.endMarkedContent();
|
||||
document.end();
|
||||
@ -158,7 +158,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -192,17 +192,17 @@ EMC
|
||||
EMC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.markContent("Span");
|
||||
document.markStructureContent("P");
|
||||
document.markContent("Span");
|
||||
document.markStructureContent("P");
|
||||
document.markContent("Artifact");
|
||||
document.markContent("Artifact");
|
||||
document.markStructureContent("P");
|
||||
document.markStructureContent("P");
|
||||
document.markContent('Span');
|
||||
document.markStructureContent('P');
|
||||
document.markContent('Span');
|
||||
document.markStructureContent('P');
|
||||
document.markContent('Artifact');
|
||||
document.markContent('Artifact');
|
||||
document.markStructureContent('P');
|
||||
document.markStructureContent('P');
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -213,7 +213,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -229,12 +229,12 @@ EMC
|
||||
EMC
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
const structureContent = document.markStructureContent("P");
|
||||
document.markContent("Span");
|
||||
document.text("on the first page");
|
||||
const structureContent = document.markStructureContent('P');
|
||||
document.markContent('Span');
|
||||
document.text('on the first page');
|
||||
document.continueOnNewPage();
|
||||
document.end();
|
||||
|
||||
@ -251,7 +251,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
@ -260,23 +260,21 @@ EMC
|
||||
test('atomically constructed', () => {
|
||||
const docData = logData(document);
|
||||
|
||||
const pContent1 = document.markStructureContent("P");
|
||||
const linkContent = document.markStructureContent("Link");
|
||||
const pContent2 = document.markStructureContent("P");
|
||||
const pContent3 = document.markStructureContent("P");
|
||||
document.markContent("Span");
|
||||
const pContent1 = document.markStructureContent('P');
|
||||
const linkContent = document.markStructureContent('Link');
|
||||
const pContent2 = document.markStructureContent('P');
|
||||
const pContent3 = document.markStructureContent('P');
|
||||
document.markContent('Span');
|
||||
|
||||
const section1 = document.struct('Sect', [
|
||||
document.struct('P', [
|
||||
pContent1,
|
||||
document.struct('Link', linkContent),
|
||||
pContent2
|
||||
])
|
||||
pContent2,
|
||||
]),
|
||||
]);
|
||||
const section2 = document.struct('Sect', [
|
||||
document.struct('P', [
|
||||
pContent3
|
||||
])
|
||||
document.struct('P', [pContent3]),
|
||||
]);
|
||||
document.addStructure(section1).addStructure(section2);
|
||||
|
||||
@ -285,13 +283,9 @@ EMC
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/StructTreeRoot 8 0 R/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/MarkInfo 9 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`3 0 obj`, /\/MarkInfo 9 0 R/, `endobj`]);
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
`<<
|
||||
@ -304,48 +298,44 @@ EMC
|
||||
/ParentTreeNextKey 1
|
||||
/K [12 0 R 14 0 R]
|
||||
>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`12 0 obj`,
|
||||
`<<\n/S /Sect\n/P 8 0 R\n/K [11 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`11 0 obj`,
|
||||
`<<\n/S /P\n/P 12 0 R\n/K [0 10 0 R 2]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`10 0 obj`,
|
||||
`<<\n/S /Link\n/P 11 0 R\n/K [1]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`14 0 obj`,
|
||||
`<<\n/S /Sect\n/P 8 0 R\n/K [13 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`13 0 obj`,
|
||||
`<<\n/S /P\n/P 14 0 R\n/K [3]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`9 0 obj`,
|
||||
`<<\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`9 0 obj`, `<<\n>>`, `endobj`]);
|
||||
});
|
||||
|
||||
test('incrementally constructed', () => {
|
||||
const docData = logData(document);
|
||||
|
||||
const pContent1 = document.markStructureContent("P");
|
||||
const linkContent = document.markStructureContent("Link");
|
||||
const pContent2 = document.markStructureContent("P");
|
||||
const pContent3 = document.markStructureContent("P");
|
||||
document.markContent("Span");
|
||||
const pContent1 = document.markStructureContent('P');
|
||||
const linkContent = document.markStructureContent('Link');
|
||||
const pContent2 = document.markStructureContent('P');
|
||||
const pContent3 = document.markStructureContent('P');
|
||||
document.markContent('Span');
|
||||
|
||||
const section1 = document.struct('Sect');
|
||||
document.addStructure(section1);
|
||||
@ -365,13 +355,9 @@ EMC
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/StructTreeRoot 8 0 R/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/MarkInfo 9 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`3 0 obj`, /\/MarkInfo 9 0 R/, `endobj`]);
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
`<<
|
||||
@ -384,38 +370,34 @@ EMC
|
||||
/ParentTreeNextKey 1
|
||||
/K [10 0 R 11 0 R]
|
||||
>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`10 0 obj`,
|
||||
`<<\n/S /Sect\n/P 8 0 R\n/K [13 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`13 0 obj`,
|
||||
`<<\n/S /P\n/P 10 0 R\n/K [0 12 0 R 2]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`12 0 obj`,
|
||||
`<<\n/S /Link\n/P 13 0 R\n/K [1]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`11 0 obj`,
|
||||
`<<\n/S /Sect\n/P 8 0 R\n/K [14 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`14 0 obj`,
|
||||
`<<\n/S /P\n/P 11 0 R\n/K [3]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`9 0 obj`,
|
||||
`<<\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`9 0 obj`, `<<\n>>`, `endobj`]);
|
||||
});
|
||||
|
||||
test('constructed with closures', () => {
|
||||
@ -427,7 +409,10 @@ EMC
|
||||
const link = document.struct('Link', () => {});
|
||||
const p1 = document.struct('P');
|
||||
section1.add(p1);
|
||||
p1.add(() => {}).add(link).add(() => {}).end();
|
||||
p1.add(() => {})
|
||||
.add(link)
|
||||
.add(() => {})
|
||||
.end();
|
||||
const p2 = document.struct('P', [() => {}]);
|
||||
section2.add(p2);
|
||||
document.addStructure(section2);
|
||||
@ -437,12 +422,12 @@ EMC
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/StructTreeRoot 9 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/MarkInfo 13 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`9 0 obj`,
|
||||
@ -456,50 +441,48 @@ EMC
|
||||
/ParentTreeNextKey 1
|
||||
/K [8 0 R 10 0 R]
|
||||
>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`8 0 obj`,
|
||||
`<<\n/S /Sect\n/P 9 0 R\n/K [12 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`12 0 obj`,
|
||||
`<<\n/S /P\n/P 8 0 R\n/K [0 11 0 R 2]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`11 0 obj`,
|
||||
`<<\n/S /Link\n/P 12 0 R\n/K [1]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`10 0 obj`,
|
||||
`<<\n/S /Sect\n/P 9 0 R\n/K [14 0 R]\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`14 0 obj`,
|
||||
`<<\n/S /P\n/P 10 0 R\n/K [3]\n/Pg 7 0 R\n>>`,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`13 0 obj`,
|
||||
`<<\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`13 0 obj`, `<<\n>>`, `endobj`]);
|
||||
});
|
||||
|
||||
test('with options', () => {
|
||||
const docData = logData(document);
|
||||
|
||||
document.addStructure(document.struct('P', {
|
||||
title: "My Title",
|
||||
lang: "en-AU",
|
||||
alt: "My Alternative",
|
||||
expanded: "My Expansion",
|
||||
actual: "My Actual Text"
|
||||
}));
|
||||
document.addStructure(
|
||||
document.struct('P', {
|
||||
title: 'My Title',
|
||||
lang: 'en-AU',
|
||||
alt: 'My Alternative',
|
||||
expanded: 'My Expansion',
|
||||
actual: 'My Actual Text',
|
||||
}),
|
||||
);
|
||||
|
||||
document.end();
|
||||
|
||||
@ -515,7 +498,7 @@ EMC
|
||||
/P 9 0 R
|
||||
/K []
|
||||
>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -549,44 +532,36 @@ EMC
|
||||
document = new PDFDocument({
|
||||
info: {
|
||||
CreationDate: new Date(Date.UTC(2018, 1, 1)),
|
||||
Title: "My Title"
|
||||
Title: 'My Title',
|
||||
},
|
||||
displayTitle: true,
|
||||
compress: false,
|
||||
pdfVersion: '1.5',
|
||||
tagged: true,
|
||||
lang: 'en-AU'
|
||||
lang: 'en-AU',
|
||||
});
|
||||
|
||||
const docData = logData(document);
|
||||
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/Lang \(en-AU\)/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/MarkInfo 5 0 R/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([`3 0 obj`, /\/Lang \(en-AU\)/, `endobj`]);
|
||||
expect(docData).toContainChunk([`3 0 obj`, /\/MarkInfo 5 0 R/, `endobj`]);
|
||||
expect(docData).toContainChunk([
|
||||
`5 0 obj`,
|
||||
`<<\n/Marked true\n>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/ViewerPreferences 7 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/StructTreeRoot 6 0 R/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -599,35 +574,18 @@ EMC
|
||||
>>
|
||||
/ParentTreeNextKey 0
|
||||
>>`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`7 0 obj`,
|
||||
/\/DisplayDocTitle true/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`trailer`,
|
||||
/\/Info 11 0 R/,
|
||||
`startxref`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`11 0 obj`,
|
||||
/\/Title 15 0 R/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`15 0 obj`,
|
||||
`(My Title)`,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
`10 0 obj`,
|
||||
/\/Tabs \/S/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([`trailer`, /\/Info 11 0 R/, `startxref`]);
|
||||
expect(docData).toContainChunk([`11 0 obj`, /\/Title 15 0 R/, `endobj`]);
|
||||
expect(docData).toContainChunk([`15 0 obj`, `(My Title)`, `endobj`]);
|
||||
expect(docData).toContainChunk([`10 0 obj`, /\/Tabs \/S/, `endobj`]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -636,34 +594,26 @@ EMC
|
||||
document = new PDFDocument({
|
||||
info: {
|
||||
CreationDate: new Date(Date.UTC(2018, 1, 1)),
|
||||
Title: "My Title"
|
||||
Title: 'My Title',
|
||||
},
|
||||
displayTitle: true,
|
||||
compress: false,
|
||||
pdfVersion: '1.5',
|
||||
tagged: false,
|
||||
lang: 'en-AU'
|
||||
lang: 'en-AU',
|
||||
});
|
||||
|
||||
const docData = logData(document);
|
||||
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/Lang \(en-AU\)/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).toContainChunk([`3 0 obj`, /\/Lang \(en-AU\)/, `endobj`]);
|
||||
expect(docData).not.toContainChunk([
|
||||
`3 0 obj`,
|
||||
/\/MarkInfo 5 0 R/,
|
||||
`endobj`
|
||||
]);
|
||||
expect(docData).not.toContainChunk([
|
||||
`10 0 obj`,
|
||||
/\/Tabs \/S/,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).not.toContainChunk([`10 0 obj`, /\/Tabs \/S/, `endobj`]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -698,12 +648,12 @@ ET
|
||||
Q
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
const section = document.struct('Sect');
|
||||
document.addStructure(section);
|
||||
document.text("Paragraph 1\nParagraph 2", { structParent: section });
|
||||
document.text('Paragraph 1\nParagraph 2', { structParent: section });
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -714,7 +664,7 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'11 0 obj',
|
||||
@ -785,12 +735,12 @@ ET
|
||||
Q
|
||||
EMC
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
const list = document.struct('List');
|
||||
document.addStructure(list);
|
||||
document.list(["Item 1","Item 2"], { structParent: list });
|
||||
document.list(['Item 1', 'Item 2'], { structParent: list });
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -801,42 +751,42 @@ EMC
|
||||
`stream`,
|
||||
stream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'12 0 obj',
|
||||
'<<\n/S /Lbl\n/P 10 0 R\n/K [0]\n/Pg 7 0 R\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'13 0 obj',
|
||||
'<<\n/S /LBody\n/P 10 0 R\n/K [1]\n/Pg 7 0 R\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'16 0 obj',
|
||||
'<<\n/S /Lbl\n/P 15 0 R\n/K [2]\n/Pg 7 0 R\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'17 0 obj',
|
||||
'<<\n/S /LBody\n/P 15 0 R\n/K [3]\n/Pg 7 0 R\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'10 0 obj',
|
||||
'<<\n/S /LI\n/P 8 0 R\n/K [12 0 R 13 0 R]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'15 0 obj',
|
||||
'<<\n/S /LI\n/P 8 0 R\n/K [16 0 R 17 0 R]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'8 0 obj',
|
||||
'<<\n/S /List\n/P 9 0 R\n/K [10 0 R 15 0 R]\n>>',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,33 +1,32 @@
|
||||
import PDFMetadata from '../../lib/metadata';
|
||||
|
||||
describe('PDFMetadata', () => {
|
||||
let metadata;
|
||||
beforeEach(() => {
|
||||
metadata = new PDFMetadata();
|
||||
});
|
||||
let metadata;
|
||||
beforeEach(() => {
|
||||
metadata = new PDFMetadata();
|
||||
});
|
||||
|
||||
test('initialising metadata', () => {
|
||||
expect(metadata._metadata).toBeDefined();
|
||||
expect(metadata.getLength()).toBeGreaterThan(0);
|
||||
expect(typeof metadata._metadata).toBe('string')
|
||||
});
|
||||
test('initialising metadata', () => {
|
||||
expect(metadata._metadata).toBeDefined();
|
||||
expect(metadata.getLength()).toBeGreaterThan(0);
|
||||
expect(typeof metadata._metadata).toBe('string');
|
||||
});
|
||||
|
||||
test('contains appended XML', () => {
|
||||
let xml = `
|
||||
test('contains appended XML', () => {
|
||||
let xml = `
|
||||
<dc:title>
|
||||
<rdf:Alt>
|
||||
<rdf:li xml:lang="x-default">Test</rdf:li>
|
||||
</rdf:Alt>
|
||||
</dc:title>
|
||||
`
|
||||
metadata.append(xml);
|
||||
expect(metadata.getXML()).toContain(xml);
|
||||
});
|
||||
`;
|
||||
metadata.append(xml);
|
||||
expect(metadata.getXML()).toContain(xml);
|
||||
});
|
||||
|
||||
test('closing tags', () => {
|
||||
let length = metadata.getLength();
|
||||
metadata.end();
|
||||
expect(metadata.getLength()).toBeGreaterThan(length);
|
||||
});
|
||||
|
||||
});
|
||||
test('closing tags', () => {
|
||||
let length = metadata.getLength();
|
||||
metadata.end();
|
||||
expect(metadata.getLength()).toBeGreaterThan(length);
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,13 +1,13 @@
|
||||
import PDFDocument from '../../lib/document';
|
||||
import { logData } from './helpers';
|
||||
|
||||
describe('Pattern', function() {
|
||||
describe('Pattern', function () {
|
||||
let document;
|
||||
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
compress: false
|
||||
compress: false,
|
||||
});
|
||||
});
|
||||
|
||||
@ -16,10 +16,7 @@ describe('Pattern', function() {
|
||||
const patternStream = '1 w 0 1 m 4 5 l s 2 0 m 5 3 l s';
|
||||
const binaryStream = Buffer.from(`${patternStream}\n`, 'binary');
|
||||
const pattern = document.pattern([1, 1, 4, 4], 3, 3, patternStream);
|
||||
document
|
||||
.rect(0, 0, 100, 100)
|
||||
.fill([pattern, 'blue'])
|
||||
.end();
|
||||
document.rect(0, 0, 100, 100).fill([pattern, 'blue']).end();
|
||||
|
||||
// empty resources
|
||||
expect(docData).toContainChunk(['10 0 obj', `<<\n>>`]);
|
||||
@ -41,7 +38,7 @@ describe('Pattern', function() {
|
||||
>>`,
|
||||
'stream',
|
||||
binaryStream,
|
||||
'\nendstream'
|
||||
'\nendstream',
|
||||
]);
|
||||
|
||||
// page resource dictionary with color space and pattern
|
||||
@ -56,7 +53,7 @@ describe('Pattern', function() {
|
||||
/Pattern <<
|
||||
/P1 11 0 R
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
// map to the underlying color space
|
||||
expect(docData).toContainChunk(['8 0 obj', `[/Pattern /DeviceCMYK]`]);
|
||||
@ -68,7 +65,7 @@ describe('Pattern', function() {
|
||||
/CsPDeviceRGB cs
|
||||
0 0 1 /P1 scn
|
||||
f\n`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
expect(docData).toContainChunk([
|
||||
'5 0 obj',
|
||||
@ -77,7 +74,7 @@ f\n`,
|
||||
>>`,
|
||||
'stream',
|
||||
graphicsStream,
|
||||
'\nendstream'
|
||||
'\nendstream',
|
||||
]);
|
||||
});
|
||||
|
||||
@ -87,19 +84,16 @@ f\n`,
|
||||
[1, 1, 4, 4],
|
||||
3,
|
||||
3,
|
||||
'1 w 0 1 m 4 5 l s 2 0 m 5 3 l s'
|
||||
'1 w 0 1 m 4 5 l s 2 0 m 5 3 l s',
|
||||
);
|
||||
const pattern2 = document.pattern(
|
||||
[1, 1, 7, 7],
|
||||
6,
|
||||
6,
|
||||
'1 w 0 1 m 7 8 l s 5 0 m 8 3 l s'
|
||||
'1 w 0 1 m 7 8 l s 5 0 m 8 3 l s',
|
||||
);
|
||||
document.rect(0, 0, 100, 100).fill([pattern1, 'blue']);
|
||||
document
|
||||
.rect(0, 0, 100, 100)
|
||||
.fill([pattern2, 'red'])
|
||||
.end();
|
||||
document.rect(0, 0, 100, 100).fill([pattern2, 'red']).end();
|
||||
|
||||
// patterns P1 and P2
|
||||
expect(docData).toContainChunk([
|
||||
@ -114,7 +108,7 @@ f\n`,
|
||||
/P1 11 0 R
|
||||
/P2 13 0 R
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -2,110 +2,108 @@ import PDFDocument from '../../lib/document';
|
||||
import { logData, joinTokens } from './helpers';
|
||||
|
||||
describe('PDF/A-1', () => {
|
||||
test('metadata is present', () => {
|
||||
let options = {
|
||||
autoFirstPage: false,
|
||||
pdfVersion: '1.4',
|
||||
subset: 'PDF/A-1',
|
||||
};
|
||||
let doc = new PDFDocument(options);
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
expect(data).toContainChunk([
|
||||
`11 0 obj`,
|
||||
`<<\n/length 892\n/Type /Metadata\n/Subtype /XML\n/Length 894\n>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
test('metadata is present', () => {
|
||||
let options = {
|
||||
autoFirstPage: false,
|
||||
pdfVersion: '1.4',
|
||||
subset: 'PDF/A-1'
|
||||
};
|
||||
let doc = new PDFDocument(options);
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
expect(data).toContainChunk([
|
||||
`11 0 obj`,
|
||||
`<<\n/length 892\n/Type /Metadata\n/Subtype /XML\n/Length 894\n>>`
|
||||
]);
|
||||
test('color profile is present', () => {
|
||||
const expected = [
|
||||
`10 0 obj`,
|
||||
joinTokens(
|
||||
'<<',
|
||||
'/Type /OutputIntent',
|
||||
'/S /GTS_PDFA1',
|
||||
'/Info (sRGB IEC61966-2.1)',
|
||||
'/OutputConditionIdentifier (sRGB IEC61966-2.1)',
|
||||
'/DestOutputProfile 9 0 R',
|
||||
'>>',
|
||||
),
|
||||
];
|
||||
let options = {
|
||||
autoFirstPage: false,
|
||||
pdfVersion: '1.4',
|
||||
subset: 'PDF/A-1',
|
||||
};
|
||||
let doc = new PDFDocument(options);
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
expect(data).toContainChunk(expected);
|
||||
});
|
||||
|
||||
test('metadata contains pdfaid part and conformance', () => {
|
||||
let options = {
|
||||
autoFirstPage: false,
|
||||
pdfVersion: '1.4',
|
||||
subset: 'PDF/A-1',
|
||||
};
|
||||
let doc = new PDFDocument(options);
|
||||
const data = logData(doc);
|
||||
doc.end();
|
||||
let metadata = Buffer.from(data[27]).toString();
|
||||
|
||||
expect(metadata).toContain('pdfaid:part>1');
|
||||
expect(metadata).toContain('pdfaid:conformance');
|
||||
});
|
||||
|
||||
  test('metadata pdfaid conformance B', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-1b',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>B');
  });

  test('metadata pdfaid conformance A', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-1a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>A');
  });

  test('font data contains CIDSet', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-1a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.addPage();
    doc.registerFont('Roboto', 'tests/fonts/Roboto-Regular.ttf');
    doc.font('Roboto');
    doc.text('Text');
    doc.end();

    let fontDescriptor = data.find((v) => {
      return v.includes('/Type /FontDescriptor');
    });

    expect(fontDescriptor).not.toBeUndefined();

    expect(fontDescriptor).toContain('/CIDSet');
  });

  test('color profile is present', () => {
    const expected = [
      `10 0 obj`,
      joinTokens(
        '<<',
        '/Type /OutputIntent',
        '/S /GTS_PDFA1',
        '/Info (sRGB IEC61966-2.1)',
        '/OutputConditionIdentifier (sRGB IEC61966-2.1)',
        '/DestOutputProfile 9 0 R',
        '>>'
      ),
    ];
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-1'
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk(expected);
  });

  test('metadata contains pdfaid part and conformance', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-1'
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:part>1');
    expect(metadata).toContain('pdfaid:conformance');
  });
});
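The assertions above all work on the raw chunks the document emits while it is being written; the logData test helper used here appears to collect those chunks into an array. As an illustrative sketch only (the stream events are standard PDFKit/Node behaviour, but joining every chunk and the option values below are simply lifted from the tests above, not part of this change):

import PDFDocument from '../../lib/document';

// Collect every chunk the document writes, roughly what the logData helper does.
const doc = new PDFDocument({
  autoFirstPage: false,
  pdfVersion: '1.4',
  subset: 'PDF/A-1b',
});
const chunks = [];
doc.on('data', (chunk) => chunks.push(chunk));
doc.on('end', () => {
  // The XMP metadata stream is one of the emitted chunks.
  const xmp = chunks.map((c) => Buffer.from(c).toString()).join('');
  console.log(xmp.includes('pdfaid:conformance>B')); // expected to print true
});
doc.end();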
@ -2,110 +2,108 @@ import PDFDocument from '../../lib/document';
import { logData, joinTokens } from './helpers';

describe('PDF/A-2', () => {
  test('metadata is present', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-2',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk([
      `11 0 obj`,
      `<<\n/length 892\n/Type /Metadata\n/Subtype /XML\n/Length 894\n>>`,
    ]);
  });

  test('color profile is present', () => {
    const expected = [
      `10 0 obj`,
      joinTokens(
        '<<',
        '/Type /OutputIntent',
        '/S /GTS_PDFA1',
        '/Info (sRGB IEC61966-2.1)',
        '/OutputConditionIdentifier (sRGB IEC61966-2.1)',
        '/DestOutputProfile 9 0 R',
        '>>',
      ),
    ];
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-2',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk(expected);
  });

  test('metadata contains pdfaid part and conformance', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-2',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:part>2');
    expect(metadata).toContain('pdfaid:conformance');
  });

  test('metadata pdfaid conformance B', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-2b',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>B');
  });

  test('metadata pdfaid conformance A', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-2a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>A');
  });

  test('font data NOT contains CIDSet', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-2a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.addPage();
    doc.registerFont('Roboto', 'tests/fonts/Roboto-Regular.ttf');
    doc.font('Roboto');
    doc.text('Text');
    doc.end();

    let fontDescriptor = data.find((v) => {
      return v.includes('/Type /FontDescriptor');
    });

    expect(fontDescriptor).not.toBeUndefined();

    expect(fontDescriptor).not.toContain('/CIDSet');
  });
});
@ -2,110 +2,108 @@ import PDFDocument from '../../lib/document';
import { logData, joinTokens } from './helpers';

describe('PDF/A-3', () => {
  test('metadata is present', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-3',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk([
      `11 0 obj`,
      `<<\n/length 892\n/Type /Metadata\n/Subtype /XML\n/Length 894\n>>`,
    ]);
  });

  test('color profile is present', () => {
    const expected = [
      `10 0 obj`,
      joinTokens(
        '<<',
        '/Type /OutputIntent',
        '/S /GTS_PDFA1',
        '/Info (sRGB IEC61966-2.1)',
        '/OutputConditionIdentifier (sRGB IEC61966-2.1)',
        '/DestOutputProfile 9 0 R',
        '>>',
      ),
    ];
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-3',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk(expected);
  });

  test('metadata contains pdfaid part and conformance', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-3',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:part>3');
    expect(metadata).toContain('pdfaid:conformance');
  });

  test('metadata pdfaid conformance B', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-3b',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>B');
  });

  test('metadata pdfaid conformance A', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/A-3a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[27]).toString();

    expect(metadata).toContain('pdfaid:conformance>A');
  });

  test('font data NOT contains CIDSet', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.4',
      subset: 'PDF/A-3a',
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.addPage();
    doc.registerFont('Roboto', 'tests/fonts/Roboto-Regular.ttf');
    doc.font('Roboto');
    doc.text('Text');
    doc.end();

    let fontDescriptor = data.find((v) => {
      return v.includes('/Type /FontDescriptor');
    });

    expect(fontDescriptor).not.toBeUndefined();

    expect(fontDescriptor).not.toContain('/CIDSet');
  });
});
@ -2,36 +2,34 @@ import PDFDocument from '../../lib/document';
import { logData } from './helpers';

describe('PDF/UA', () => {
  test('metadata is present', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/UA',
      tagged: true,
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    expect(data).toContainChunk([
      `11 0 obj`,
      `<<\n/length 841\n/Type /Metadata\n/Subtype /XML\n/Length 843\n>>`,
    ]);
  });

  test('metadata constains pdfuaid part', () => {
    let options = {
      autoFirstPage: false,
      pdfVersion: '1.7',
      subset: 'PDF/UA',
      tagged: true,
    };
    let doc = new PDFDocument(options);
    const data = logData(doc);
    doc.end();
    let metadata = Buffer.from(data[24]).toString();

    expect(metadata).toContain('pdfuaid:part>1');
  });
});
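For reference, the options exercised by these PDF/UA tests are the same ones a caller would pass when generating a tagged, accessible document. A minimal sketch, assuming only what the tests above show (the output path and the text are placeholders, not part of this change):

import PDFDocument from '../../lib/document';
import fs from 'fs';

// Request the PDF/UA subset on a tagged document; this is what makes the
// pdfuaid:part marker show up in the XMP metadata checked above.
const doc = new PDFDocument({
  pdfVersion: '1.7',
  subset: 'PDF/UA',
  tagged: true,
});
doc.pipe(fs.createWriteStream('example-pdfua.pdf')); // placeholder output path
doc.font('tests/fonts/Roboto-Regular.ttf'); // embedded font, as in the tests
doc.text('Accessible text');
doc.end();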
@ -6,7 +6,7 @@ import fs from 'fs';
|
||||
describe('PNGImage', () => {
|
||||
let document;
|
||||
|
||||
const createImage = fileName => {
|
||||
const createImage = (fileName) => {
|
||||
const img = new PNGImage(fs.readFileSync(fileName), 'I1');
|
||||
// noop data manipulation methods
|
||||
img.loadIndexedAlphaChannel = () => {
|
||||
@ -56,14 +56,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 400,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 3,
|
||||
Columns: 400,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
});
|
||||
|
||||
@ -77,7 +77,7 @@ describe('PNGImage', () => {
|
||||
// Interlace = 0
|
||||
|
||||
const img = createImage(
|
||||
'./tests/images/pngsuite-rgb-transparent-white.png'
|
||||
'./tests/images/pngsuite-rgb-transparent-white.png',
|
||||
);
|
||||
|
||||
expect(img.finalize).toBeCalledTimes(1);
|
||||
@ -92,14 +92,14 @@ describe('PNGImage', () => {
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
Mask: [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255],
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 16,
|
||||
Colors: 3,
|
||||
Columns: 32,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
});
|
||||
|
||||
@ -125,7 +125,7 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 409,
|
||||
SMask: expect.any(PDFReference)
|
||||
SMask: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.SMask.data).toMatchObject({
|
||||
@ -137,7 +137,7 @@ describe('PNGImage', () => {
|
||||
Length: 16,
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 409
|
||||
Width: 409,
|
||||
});
|
||||
});
|
||||
|
||||
@ -163,7 +163,7 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 175,
|
||||
SMask: expect.any(PDFReference)
|
||||
SMask: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.SMask.data).toMatchObject({
|
||||
@ -175,7 +175,7 @@ describe('PNGImage', () => {
|
||||
Length: 16,
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 175
|
||||
Width: 175,
|
||||
});
|
||||
});
|
||||
|
||||
@ -201,14 +201,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 980,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 1,
|
||||
Columns: 980,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
});
|
||||
|
||||
@ -222,7 +222,7 @@ describe('PNGImage', () => {
|
||||
// Interlace = 0
|
||||
|
||||
const img = createImage(
|
||||
'./tests/images/pngsuite-palette-transparent-white.png'
|
||||
'./tests/images/pngsuite-palette-transparent-white.png',
|
||||
);
|
||||
|
||||
expect(img.finalize).toBeCalledTimes(1);
|
||||
@ -237,14 +237,14 @@ describe('PNGImage', () => {
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
SMask: expect.any(PDFReference)
|
||||
SMask: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 1,
|
||||
Columns: 32,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
|
||||
expect(img.obj.data.SMask.data).toMatchObject({
|
||||
@ -256,7 +256,7 @@ describe('PNGImage', () => {
|
||||
Length: 16,
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32
|
||||
Width: 32,
|
||||
});
|
||||
});
|
||||
|
||||
@ -282,7 +282,7 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 320,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
});
|
||||
|
||||
@ -296,7 +296,7 @@ describe('PNGImage', () => {
|
||||
// Interlace = 0
|
||||
|
||||
const img = createImage(
|
||||
'./tests/images/pngsuite-gray-transparent-black.png'
|
||||
'./tests/images/pngsuite-gray-transparent-black.png',
|
||||
);
|
||||
|
||||
expect(img.finalize).toBeCalledTimes(1);
|
||||
@ -311,14 +311,14 @@ describe('PNGImage', () => {
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
Mask: [0, 0],
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 4,
|
||||
Colors: 1,
|
||||
Columns: 32,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
});
|
||||
|
||||
@ -332,7 +332,7 @@ describe('PNGImage', () => {
|
||||
// Interlace = 0
|
||||
|
||||
const img = createImage(
|
||||
'./tests/images/pngsuite-gray-transparent-white.png'
|
||||
'./tests/images/pngsuite-gray-transparent-white.png',
|
||||
);
|
||||
|
||||
expect(img.finalize).toBeCalledTimes(1);
|
||||
@ -347,14 +347,14 @@ describe('PNGImage', () => {
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
Mask: [255, 255],
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 16,
|
||||
Colors: 1,
|
||||
Columns: 32,
|
||||
Predictor: 15
|
||||
Predictor: 15,
|
||||
});
|
||||
});
|
||||
|
||||
@ -380,7 +380,7 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 112,
|
||||
SMask: expect.any(PDFReference)
|
||||
SMask: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.SMask.data).toMatchObject({
|
||||
@ -392,7 +392,7 @@ describe('PNGImage', () => {
|
||||
Length: 16,
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 112
|
||||
Width: 112,
|
||||
});
|
||||
});
|
||||
|
||||
@ -418,14 +418,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 1,
|
||||
Columns: 32,
|
||||
Predictor: 1
|
||||
Predictor: 1,
|
||||
});
|
||||
});
|
||||
|
||||
@ -451,14 +451,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 1,
|
||||
Columns: 32,
|
||||
Predictor: 1
|
||||
Predictor: 1,
|
||||
});
|
||||
});
|
||||
|
||||
@ -484,14 +484,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 8,
|
||||
Colors: 3,
|
||||
Columns: 32,
|
||||
Predictor: 1
|
||||
Predictor: 1,
|
||||
});
|
||||
});
|
||||
|
||||
@ -517,14 +517,14 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
DecodeParms: expect.any(PDFReference)
|
||||
DecodeParms: expect.any(PDFReference),
|
||||
});
|
||||
|
||||
expect(img.obj.data.DecodeParms.data).toMatchObject({
|
||||
BitsPerComponent: 16,
|
||||
Colors: 3,
|
||||
Columns: 32,
|
||||
Predictor: 1
|
||||
Predictor: 1,
|
||||
});
|
||||
});
|
||||
|
||||
@ -550,7 +550,7 @@ describe('PNGImage', () => {
|
||||
Subtype: 'Image',
|
||||
Type: 'XObject',
|
||||
Width: 32,
|
||||
SMask: expect.any(PDFReference)
|
||||
SMask: expect.any(PDFReference),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -49,7 +49,7 @@ describe('PDFReference', () => {
|
||||
'stream',
|
||||
chunk,
|
||||
'\nendstream',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
});
|
||||
|
||||
@ -70,7 +70,7 @@ describe('PDFReference', () => {
|
||||
'stream',
|
||||
compressed,
|
||||
'\nendstream',
|
||||
'endobj'
|
||||
'endobj',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -7,7 +7,7 @@ describe('Text', () => {
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
compress: false
|
||||
compress: false,
|
||||
});
|
||||
});
|
||||
|
||||
@ -26,7 +26,7 @@ BT
|
||||
ET
|
||||
Q
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.text('simple text');
|
||||
@ -40,7 +40,7 @@ Q
|
||||
`stream`,
|
||||
textStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -58,7 +58,7 @@ BT
|
||||
ET
|
||||
Q
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document.text('simple text', 600, 20);
|
||||
@ -72,7 +72,7 @@ Q
|
||||
`stream`,
|
||||
textStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -163,7 +163,7 @@ BT
|
||||
ET
|
||||
Q
|
||||
`,
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
// before this test, this case used to make the code run into an infinite loop.
|
||||
@ -191,7 +191,7 @@ Q
|
||||
`stream`,
|
||||
textStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -46,13 +46,13 @@ export default {
|
||||
if (pass) {
|
||||
return {
|
||||
pass: true,
|
||||
message: passMessage(this.utils, data, chunk, headIndex)
|
||||
message: passMessage(this.utils, data, chunk, headIndex),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
pass: false,
|
||||
message: failMessage(this.utils, data, chunk, headIndex)
|
||||
message: failMessage(this.utils, data, chunk, headIndex),
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@ -12,7 +12,7 @@ describe('Document trailer', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) }
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
});
|
||||
});
|
||||
|
||||
@ -21,14 +21,14 @@ describe('Document trailer', () => {
|
||||
document.end();
|
||||
expect(docData).toContainChunk([
|
||||
'8 0 obj',
|
||||
'<<\n/Producer 9 0 R\n/Creator 10 0 R\n/CreationDate 11 0 R\n>>'
|
||||
'<<\n/Producer 9 0 R\n/Creator 10 0 R\n/CreationDate 11 0 R\n>>',
|
||||
]);
|
||||
expect(docData).toContainChunk(['9 0 obj', '(PDFKit)']);
|
||||
expect(docData).toContainChunk(['10 0 obj', '(PDFKit)']);
|
||||
expect(docData).toContainChunk(['11 0 obj', '(D:20180201000000Z)']);
|
||||
expect(docData).toContainChunk([
|
||||
'trailer',
|
||||
`<<\n/Size 12\n/Root 3 0 R\n/Info 8 0 R\n/ID [<6d6f636b65642d7064662d6964> <6d6f636b65642d7064662d6964>]\n>>`
|
||||
`<<\n/Size 12\n/Root 3 0 R\n/Info 8 0 R\n/ID [<6d6f636b65642d7064662d6964> <6d6f636b65642d7064662d6964>]\n>>`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -37,7 +37,7 @@ describe('Document trailer', () => {
|
||||
document.end();
|
||||
expect(docData).toContainChunk([
|
||||
'2 0 obj',
|
||||
'<<\n/Dests <<\n /Names [\n]\n>>\n>>'
|
||||
'<<\n/Dests <<\n /Names [\n]\n>>\n>>',
|
||||
]);
|
||||
});
|
||||
|
||||
@ -60,7 +60,7 @@ describe('Document trailer', () => {
|
||||
(LINK3) [7 0 R /XYZ 36 756 50]
|
||||
]
|
||||
>>
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
expect(docData).toContainChunk([
|
||||
'7 0 obj',
|
||||
@ -71,7 +71,7 @@ describe('Document trailer', () => {
|
||||
/Contents 5 0 R
|
||||
/Resources 6 0 R
|
||||
/Annots [9 0 R]
|
||||
>>`
|
||||
>>`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -7,7 +7,7 @@ describe('Vector Graphics', () => {
|
||||
beforeEach(() => {
|
||||
document = new PDFDocument({
|
||||
info: { CreationDate: new Date(Date.UTC(2018, 1, 1)) },
|
||||
compress: false
|
||||
compress: false,
|
||||
});
|
||||
});
|
||||
|
||||
@ -16,13 +16,10 @@ describe('Vector Graphics', () => {
|
||||
const docData = logData(document);
|
||||
const vectorStream = Buffer.from(
|
||||
'1 0 0 -1 0 792 cm\n50 20 m\n[2 2] 0 d\nS\n',
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document
|
||||
.moveTo(50, 20)
|
||||
.dash(2)
|
||||
.stroke();
|
||||
document.moveTo(50, 20).dash(2).stroke();
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -33,7 +30,7 @@ describe('Vector Graphics', () => {
|
||||
`stream`,
|
||||
vectorStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -41,13 +38,10 @@ describe('Vector Graphics', () => {
|
||||
const docData = logData(document);
|
||||
const vectorStream = Buffer.from(
|
||||
'1 0 0 -1 0 792 cm\n50 20 m\n[1 2] 0 d\nS\n',
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document
|
||||
.moveTo(50, 20)
|
||||
.dash([1, 2])
|
||||
.stroke();
|
||||
document.moveTo(50, 20).dash([1, 2]).stroke();
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -58,7 +52,7 @@ describe('Vector Graphics', () => {
|
||||
`stream`,
|
||||
vectorStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -66,13 +60,10 @@ describe('Vector Graphics', () => {
|
||||
const docData = logData(document);
|
||||
const vectorStream = Buffer.from(
|
||||
'1 0 0 -1 0 792 cm\n50 20 m\n[2 10] 0 d\nS\n',
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document
|
||||
.moveTo(50, 20)
|
||||
.dash(2, { space: 10 })
|
||||
.stroke();
|
||||
document.moveTo(50, 20).dash(2, { space: 10 }).stroke();
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -83,7 +74,7 @@ describe('Vector Graphics', () => {
|
||||
`stream`,
|
||||
vectorStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -91,13 +82,10 @@ describe('Vector Graphics', () => {
|
||||
const docData = logData(document);
|
||||
const vectorStream = Buffer.from(
|
||||
'1 0 0 -1 0 792 cm\n50 20 m\n[2 2] 8 d\nS\n',
|
||||
'binary'
|
||||
'binary',
|
||||
);
|
||||
|
||||
document
|
||||
.moveTo(50, 20)
|
||||
.dash(2, { phase: 8 })
|
||||
.stroke();
|
||||
document.moveTo(50, 20).dash(2, { phase: 8 }).stroke();
|
||||
document.end();
|
||||
|
||||
expect(docData).toContainChunk([
|
||||
@ -108,7 +96,7 @@ describe('Vector Graphics', () => {
|
||||
`stream`,
|
||||
vectorStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
|
||||
@ -129,7 +117,7 @@ describe('Vector Graphics', () => {
|
||||
const doc = new PDFDocument();
|
||||
|
||||
expect(() => doc.dash(0)).toThrow(
|
||||
'dash(0, {}) invalid, lengths must be numeric and greater than zero'
|
||||
'dash(0, {}) invalid, lengths must be numeric and greater than zero',
|
||||
);
|
||||
});
|
||||
|
||||
@ -137,7 +125,7 @@ describe('Vector Graphics', () => {
|
||||
const doc = new PDFDocument();
|
||||
|
||||
expect(() => doc.dash(-1)).toThrow(
|
||||
'dash(-1, {}) invalid, lengths must be numeric and greater than zero'
|
||||
'dash(-1, {}) invalid, lengths must be numeric and greater than zero',
|
||||
);
|
||||
});
|
||||
|
||||
@ -145,7 +133,7 @@ describe('Vector Graphics', () => {
|
||||
const doc = new PDFDocument();
|
||||
|
||||
expect(() => doc.dash(null)).toThrow(
|
||||
'dash(null, {}) invalid, lengths must be numeric and greater than zero'
|
||||
'dash(null, {}) invalid, lengths must be numeric and greater than zero',
|
||||
);
|
||||
});
|
||||
|
||||
@ -159,7 +147,7 @@ describe('Vector Graphics', () => {
|
||||
const doc = new PDFDocument();
|
||||
|
||||
expect(() => doc.dash([2, 0, 3])).toThrow(
|
||||
'dash([2,0,3], {}) invalid, lengths must be numeric and greater than zero'
|
||||
'dash([2,0,3], {}) invalid, lengths must be numeric and greater than zero',
|
||||
);
|
||||
});
|
||||
});
|
||||
@ -168,10 +156,12 @@ describe('Vector Graphics', () => {
|
||||
describe('translate', () => {
|
||||
test('identity transform is ignored', () => {
|
||||
const docData = logData(document);
|
||||
const vectorStream = Buffer.from(`1 0 0 -1 0 792 cm\n1 0 0 1 0 0 cm\n`, 'binary');
|
||||
const vectorStream = Buffer.from(
|
||||
`1 0 0 -1 0 792 cm\n1 0 0 1 0 0 cm\n`,
|
||||
'binary',
|
||||
);
|
||||
|
||||
document
|
||||
.translate(0, 0);
|
||||
document.translate(0, 0);
|
||||
document.end();
|
||||
|
||||
expect(docData).not.toContainChunk([
|
||||
@ -182,7 +172,7 @@ describe('Vector Graphics', () => {
|
||||
`stream`,
|
||||
vectorStream,
|
||||
`\nendstream`,
|
||||
`endobj`
|
||||
`endobj`,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@ -3,33 +3,33 @@ import fs from '../../lib/virtual-fs';
|
||||
function checkMissingFiles(files) {
|
||||
for (let file of files) {
|
||||
expect(() => fs.readFileSync(`files/${file}`)).toThrow(
|
||||
`File 'files/${file}' not found in virtual file system`
|
||||
`File 'files/${file}' not found in virtual file system`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
describe('virtual-fs', function() {
|
||||
describe('virtual-fs', function () {
|
||||
beforeEach(() => {
|
||||
fs.fileData = {};
|
||||
});
|
||||
|
||||
test('readFileSync', function() {
|
||||
test('readFileSync', function () {
|
||||
checkMissingFiles(['encoded', 'raw', 'binary']);
|
||||
|
||||
fs.bindFileData({
|
||||
'files/binary': Buffer.from('Buffer content')
|
||||
'files/binary': Buffer.from('Buffer content'),
|
||||
});
|
||||
|
||||
const base64Data = fs.readFileSync('files/binary', 'base64');
|
||||
expect(base64Data).toEqual('QnVmZmVyIGNvbnRlbnQ=');
|
||||
});
|
||||
|
||||
test('writeFileSync', function() {
|
||||
test('writeFileSync', function () {
|
||||
checkMissingFiles(['encoded', 'raw', 'binary']);
|
||||
|
||||
fs.writeFileSync(
|
||||
'files/encoded',
|
||||
Buffer.from('File content').toString('base64')
|
||||
Buffer.from('File content').toString('base64'),
|
||||
);
|
||||
fs.writeFileSync('files/raw', 'File content');
|
||||
fs.writeFileSync('files/binary', new Uint8Array([4, 3, 1, 2]));
|
||||
@ -46,16 +46,16 @@ describe('virtual-fs', function() {
|
||||
expect(binaryData.toJSON()).toEqual({ data: [4, 3, 1, 2], type: 'Buffer' });
|
||||
});
|
||||
|
||||
test('bindFileData', function() {
|
||||
test('bindFileData', function () {
|
||||
checkMissingFiles(['encoded', 'raw', 'binary']);
|
||||
|
||||
fs.bindFileData({
|
||||
'files/encoded': Buffer.from('File content').toString('base64')
|
||||
'files/encoded': Buffer.from('File content').toString('base64'),
|
||||
});
|
||||
|
||||
fs.bindFileData({
|
||||
'files/raw': 'File content',
|
||||
'files/binary': new Uint8Array([4, 3, 1, 2])
|
||||
'files/binary': new Uint8Array([4, 3, 1, 2]),
|
||||
});
|
||||
|
||||
const encodedData = fs.readFileSync('files/encoded');
|
||||
@ -73,9 +73,9 @@ describe('virtual-fs', function() {
|
||||
fs.bindFileData(
|
||||
{
|
||||
'files/raw': 'New File content',
|
||||
'files/binary2': new Uint8Array([4, 3, 1, 2])
|
||||
'files/binary2': new Uint8Array([4, 3, 1, 2]),
|
||||
},
|
||||
{ reset: true }
|
||||
{ reset: true },
|
||||
);
|
||||
|
||||
checkMissingFiles(['encoded', 'binary']);
|
||||
|
||||
@ -24,29 +24,29 @@ Cyrillic 2 (Extended)
|
||||
ЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏҐӁҒҖҚҢҮҰҲҶҺӘӢӨӮ
|
||||
ѐёђѓєѕіїјљњћќѝўџґӂғҗқңүұҳҷһәӣөӯ`;
|
||||
|
||||
describe('fonts', function() {
|
||||
test.skip('default (Helvetica)', function() {
|
||||
return runDocTest({ systemFonts: true }, function(doc) {
|
||||
describe('fonts', function () {
|
||||
test.skip('default (Helvetica)', function () {
|
||||
return runDocTest({ systemFonts: true }, function (doc) {
|
||||
doc.text(characters, 10, 10);
|
||||
});
|
||||
});
|
||||
|
||||
test.skip('Helvetica Bold', function() {
|
||||
return runDocTest({ systemFonts: true }, function(doc) {
|
||||
test.skip('Helvetica Bold', function () {
|
||||
return runDocTest({ systemFonts: true }, function (doc) {
|
||||
doc.font('Helvetica-Bold');
|
||||
doc.text(characters, 10, 10);
|
||||
});
|
||||
});
|
||||
|
||||
test('Roboto', function() {
|
||||
return runDocTest(function(doc) {
|
||||
test('Roboto', function () {
|
||||
return runDocTest(function (doc) {
|
||||
doc.font('tests/fonts/Roboto-Regular.ttf');
|
||||
doc.text(characters, 10, 10);
|
||||
});
|
||||
});
|
||||
|
||||
test('Roboto Bold', function() {
|
||||
return runDocTest(function(doc) {
|
||||
test('Roboto Bold', function () {
|
||||
return runDocTest(function (doc) {
|
||||
doc.font('tests/fonts/Roboto-Medium.ttf');
|
||||
doc.text(characters, 10, 10);
|
||||
});
|
||||
|
||||
@ -15,25 +15,27 @@ function runDocTest(options, fn) {
|
||||
const buffers = [];
|
||||
|
||||
(async () => {
|
||||
await fn(doc)
|
||||
})().then(() => {
|
||||
doc.on('error', (err) => reject(err))
|
||||
doc.on('data', buffers.push.bind(buffers));
|
||||
doc.on('end', async () => {
|
||||
try {
|
||||
const pdfData = Buffer.concat(buffers);
|
||||
const { systemFonts = false } = options;
|
||||
const images = await pdf2png(pdfData, { systemFonts });
|
||||
for (let image of images) {
|
||||
expect(image).toMatchImageSnapshot();
|
||||
await fn(doc);
|
||||
})()
|
||||
.then(() => {
|
||||
doc.on('error', (err) => reject(err));
|
||||
doc.on('data', buffers.push.bind(buffers));
|
||||
doc.on('end', async () => {
|
||||
try {
|
||||
const pdfData = Buffer.concat(buffers);
|
||||
const { systemFonts = false } = options;
|
||||
const images = await pdf2png(pdfData, { systemFonts });
|
||||
for (let image of images) {
|
||||
expect(image).toMatchImageSnapshot();
|
||||
}
|
||||
resolve();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
resolve();
|
||||
} catch (err) {
|
||||
reject(err)
|
||||
}
|
||||
});
|
||||
doc.end();
|
||||
}).catch(err => reject(err));
|
||||
});
|
||||
doc.end();
|
||||
})
|
||||
.catch((err) => reject(err));
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
import { runDocTest } from './helpers';
|
||||
|
||||
describe('images', function() {
|
||||
test('orientation', function() {
|
||||
return runDocTest(function(doc) {
|
||||
describe('images', function () {
|
||||
test('orientation', function () {
|
||||
return runDocTest(function (doc) {
|
||||
doc.font('tests/fonts/Roboto-Regular.ttf');
|
||||
doc.fill('black');
|
||||
doc.fillColor('black');
|
||||
@ -14,154 +14,154 @@ describe('images', function() {
|
||||
40,
|
||||
10,
|
||||
{
|
||||
lineBreak: false
|
||||
}
|
||||
lineBreak: false,
|
||||
},
|
||||
);
|
||||
|
||||
doc.text('1 - No orientation needed', 40, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 40, 44.0625, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 320, 44.0625, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('2 - Flip horizonatal', 40, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 40, 138.125, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 320, 138.125, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('3 - Rotate 180 degrees', 40, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 40, 232.1875, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 320, 232.1875, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('4 - Flip vertically', 40, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 40, 326.25, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 320, 326.25, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('5 - Flip horizontally and rotate 270 degrees CW', 40, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 40, 420.3125, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 320, 420.3125, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('6 - Rotate 90 degrees CW', 40, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 40, 514.375, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 320, 514.375, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('7 - Flip horizontally and rotate 90 degrees CW', 40, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 40, 608.4375, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 320, 608.4375, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
|
||||
doc.text('8 - Rotate 270 degrees CW', 40, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 40, 702.5, {
|
||||
height: 80,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.text('(output)', 320, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 320, 702.5, {
|
||||
height: 80
|
||||
height: 80,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('orientation - with cover and alignment', function() {
|
||||
return runDocTest(function(doc) {
|
||||
test('orientation - with cover and alignment', function () {
|
||||
return runDocTest(function (doc) {
|
||||
let options = {
|
||||
align: 'center',
|
||||
cover: [60, 60],
|
||||
valign: 'center'
|
||||
valign: 'center',
|
||||
};
|
||||
|
||||
doc.font('tests/fonts/Roboto-Regular.ttf');
|
||||
@ -175,24 +175,24 @@ describe('images', function() {
|
||||
40,
|
||||
10,
|
||||
{
|
||||
lineBreak: false
|
||||
}
|
||||
lineBreak: false,
|
||||
},
|
||||
);
|
||||
|
||||
// Orientation 1
|
||||
doc.text('1 - No orientation needed', 40, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 40, 44.0625, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 44.0625, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 320, 44.0625, options);
|
||||
@ -201,18 +201,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 2
|
||||
doc.text('2 - Flip horizonatal', 40, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 40, 138.125, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 138.125, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 320, 138.125, options);
|
||||
@ -221,18 +221,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 3
|
||||
doc.text('3 - Rotate 180 degrees', 40, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 40, 232.1875, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 232.1875, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 320, 232.1875, options);
|
||||
@ -241,18 +241,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 4
|
||||
doc.text('4 - Flip vertically', 40, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 40, 326.25, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 326.25, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 320, 326.25, options);
|
||||
@ -261,18 +261,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 5
|
||||
doc.text('5 - Flip horizontally and rotate 270 degrees CW', 40, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 40, 420.3125, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 420.3125, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 320, 420.3125, options);
|
||||
@ -281,18 +281,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 6
|
||||
doc.text('6 - Rotate 90 degrees CW', 40, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 40, 514.375, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 514.375, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 320, 514.375, options);
|
||||
@ -301,18 +301,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 7
|
||||
doc.text('7 - Flip horizontally and rotate 90 degrees CW', 40, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 40, 608.4375, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 608.4375, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 320, 608.4375, options);
|
||||
@ -321,18 +321,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 8
|
||||
doc.text('8 - Rotate 270 degrees CW', 40, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 40, 702.5, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 702.5, 60, 60).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 320, 702.5, options);
|
||||
@ -341,12 +341,12 @@ describe('images', function() {
|
||||
});
|
||||
});
|
||||
|
||||
test('orientation - with fit and alignment', function() {
|
||||
return runDocTest(function(doc) {
|
||||
test('orientation - with fit and alignment', function () {
|
||||
return runDocTest(function (doc) {
|
||||
let options = {
|
||||
align: 'center',
|
||||
fit: [80, 80],
|
||||
valign: 'center'
|
||||
valign: 'center',
|
||||
};
|
||||
|
||||
doc.font('tests/fonts/Roboto-Regular.ttf');
|
||||
@ -360,24 +360,24 @@ describe('images', function() {
|
||||
40,
|
||||
10,
|
||||
{
|
||||
lineBreak: false
|
||||
}
|
||||
lineBreak: false,
|
||||
},
|
||||
);
|
||||
|
||||
// Orientation 1
|
||||
doc.text('1 - No orientation needed', 40, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 40, 44.0625, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 44.0625, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 320, 44.0625, options);
|
||||
@ -386,18 +386,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 2
|
||||
doc.text('2 - Flip horizonatal', 40, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 40, 138.125, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 138.125, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 124.0625, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-2.jpeg', 320, 138.125, options);
|
||||
@ -406,18 +406,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 3
|
||||
doc.text('3 - Rotate 180 degrees', 40, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 40, 232.1875, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 232.1875, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 218.125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-3.jpeg', 320, 232.1875, options);
|
||||
@ -426,18 +426,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 4
|
||||
doc.text('4 - Flip vertically', 40, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 40, 326.25, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 326.25, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 312.1875, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-4.jpeg', 320, 326.25, options);
|
||||
@ -446,18 +446,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 5
|
||||
doc.text('5 - Flip horizontally and rotate 270 degrees CW', 40, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 40, 420.3125, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 420.3125, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 406.25, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-5.jpeg', 320, 420.3125, options);
|
||||
@ -466,18 +466,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 6
|
||||
doc.text('6 - Rotate 90 degrees CW', 40, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 40, 514.375, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 514.375, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 500.3125, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-6.jpeg', 320, 514.375, options);
|
||||
@ -486,18 +486,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 7
|
||||
doc.text('7 - Flip horizontally and rotate 90 degrees CW', 40, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 40, 608.4375, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 608.4375, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 594.375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-7.jpeg', 320, 608.4375, options);
|
||||
@ -506,18 +506,18 @@ describe('images', function() {
|
||||
|
||||
// Orientation 8
|
||||
doc.text('8 - Rotate 270 degrees CW', 40, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 40, 702.5, {
|
||||
...options,
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
});
|
||||
|
||||
doc.rect(40, 702.5, 80, 80).stroke('red');
|
||||
|
||||
doc.text('(output)', 320, 688.4375, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-8.jpeg', 320, 702.5, options);
|
||||
@ -526,12 +526,12 @@ describe('images', function() {
|
||||
});
|
||||
});
|
||||
|
||||
test('orientation - document option', function() {
|
||||
test('orientation - document option', function () {
|
||||
let options = {
|
||||
ignoreOrientation: true
|
||||
ignoreOrientation: true,
|
||||
};
|
||||
|
||||
return runDocTest(options, function(doc) {
|
||||
return runDocTest(options, function (doc) {
|
||||
doc.font('tests/fonts/Roboto-Regular.ttf');
|
||||
doc.fill('black');
|
||||
doc.fillColor('black');
|
||||
@ -543,144 +543,144 @@ describe('images', function() {
|
||||
40,
|
||||
10,
|
||||
{
|
||||
lineBreak: false
|
||||
}
|
||||
lineBreak: false,
|
||||
},
|
||||
);
|
||||
|
||||
doc.text('1 - No orientation needed', 40, 30, {
|
||||
lineBreak: false
|
||||
lineBreak: false,
|
||||
});
|
||||
|
||||
doc.image('tests/images/orientation-1.jpeg', 40, 44.0625, {
height: 80
height: 80,
});

doc.text('(output)', 320, 30, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-1.jpeg', 320, 44.0625, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('2 - Flip horizonatal', 40, 124.0625, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-2.jpeg', 40, 138.125, {
height: 80
height: 80,
});

doc.text('(output)', 320, 124.0625, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-2.jpeg', 320, 138.125, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('3 - Rotate 180 degrees', 40, 218.125, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-3.jpeg', 40, 232.1875, {
height: 80
height: 80,
});

doc.text('(output)', 320, 218.125, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-3.jpeg', 320, 232.1875, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('4 - Flip vertically', 40, 312.1875, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-4.jpeg', 40, 326.25, {
height: 80
height: 80,
});

doc.text('(output)', 320, 312.1875, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-4.jpeg', 320, 326.25, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('5 - Flip horizontally and rotate 270 degrees CW', 40, 406.25, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-5.jpeg', 40, 420.3125, {
height: 80
height: 80,
});

doc.text('(output)', 320, 406.25, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-5.jpeg', 320, 420.3125, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('6 - Rotate 90 degrees CW', 40, 500.3125, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-6.jpeg', 40, 514.375, {
height: 80
height: 80,
});

doc.text('(output)', 320, 500.3125, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-6.jpeg', 320, 514.375, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('7 - Flip horizontally and rotate 90 degrees CW', 40, 594.375, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-7.jpeg', 40, 608.4375, {
height: 80
height: 80,
});

doc.text('(output)', 320, 594.375, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-7.jpeg', 320, 608.4375, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});

doc.text('8 - Rotate 270 degrees CW', 40, 688.4375, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-8.jpeg', 40, 702.5, {
height: 80
height: 80,
});

doc.text('(output)', 320, 688.4375, {
lineBreak: false
lineBreak: false,
});

doc.image('tests/images/orientation-8.jpeg', 320, 702.5, {
height: 80,
ignoreOrientation: false
ignoreOrientation: false,
});
});
});

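The left/right pairs in this hunk exercise PDFKit's EXIF-orientation handling: the same JPEG is placed twice, and the right-hand "(output)" copy passes ignoreOrientation: false, which asks PDFKit to apply the orientation tag stored in the file when drawing. A minimal stand-alone sketch (not the repository's test code; the image path is a hypothetical EXIF-rotated photo):

const PDFDocument = require('pdfkit');
const fs = require('fs');

const doc = new PDFDocument();
doc.pipe(fs.createWriteStream('orientation.pdf'));
// draw the photo 80pt tall and let PDFKit honour its EXIF orientation tag
doc.image('images/rotated-photo.jpg', 40, 40, { height: 80, ignoreOrientation: false });
doc.end();
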
@ -11,7 +11,7 @@ class NodeCanvasFactory {
const context = canvas.getContext('2d');
return {
canvas,
context
context,
};
}

@ -38,7 +38,7 @@ async function pdf2png(data, { systemFonts } = {}) {
// Load the PDF file.
const loadingTask = pdfjsLib.getDocument({
data,
disableFontFace: !systemFonts
disableFontFace: !systemFonts,
});

const pdfDocument = await loadingTask.promise;
@ -54,12 +54,12 @@ async function pdf2png(data, { systemFonts } = {}) {
const canvasFactory = new NodeCanvasFactory();
const canvasAndContext = canvasFactory.create(
viewport.width,
viewport.height
viewport.height,
);
const renderContext = {
canvasContext: canvasAndContext.context,
viewport,
canvasFactory
canvasFactory,
};
const renderTask = page.render(renderContext);
await renderTask.promise;

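Taken together, the two pdf2png hunks follow the usual pdf.js-in-Node rendering pipeline: load the PDF bytes, build a canvas through the factory, render the page into it. A condensed illustration under the same assumptions (the import path and the toBuffer call are typical of pdfjs-dist and node-canvas, not code taken from this repository):

const pdfjsLib = require('pdfjs-dist/legacy/build/pdf.js');

async function renderFirstPage(data) {
  // parse the PDF bytes; disableFontFace keeps rendering independent of system fonts
  const loadingTask = pdfjsLib.getDocument({ data, disableFontFace: true });
  const pdfDocument = await loadingTask.promise;
  const page = await pdfDocument.getPage(1);
  const viewport = page.getViewport({ scale: 1.0 });
  // NodeCanvasFactory is the helper class shown in the first hunk above
  const canvasFactory = new NodeCanvasFactory();
  const { canvas, context } = canvasFactory.create(viewport.width, viewport.height);
  await page.render({ canvasContext: context, viewport, canvasFactory }).promise;
  return canvas.toBuffer('image/png'); // PNG bytes via node-canvas
}
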
@ -400,11 +400,7 @@ describe('table', function () {
debug: true,
defaultStyle: { align: 'center', width: 200, height: 200 },
columnStyles: [{ textOptions: { rotation: 90 } }],
data: [
[
'Hello\nWorld',
],
],
data: [['Hello\nWorld']],
});
},
);

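The table hunk collapses the multi-line data array into data: [['Hello\nWorld']] while keeping the 90-degree cell-text rotation from columnStyles. A minimal sketch of the same call shape, assuming a PDFKit version that ships the table API:

doc.table({
  defaultStyle: { align: 'center', width: 200, height: 200 },
  columnStyles: [{ textOptions: { rotation: 90 } }], // rotate the text in the first column
  data: [['Hello\nWorld']], // one row with one cell; '\n' breaks the line inside the cell
});
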
@ -1,103 +1,117 @@
import { runDocTest } from './helpers';

describe('text', function() {
test('simple text', function() {
return runDocTest(function(doc) {
describe('text', function () {
test('simple text', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.text('Really simple text', 100, 100);
});
});

test('alignment', function() {
return runDocTest(function(doc) {
test('alignment', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.text('Left aligned text', { align: 'left' });
doc.text('Right aligned text', { align: 'right' });
doc.text(
'Justified aligned text - Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam in suscipit purus.',
{ align: 'justify' }
{ align: 'justify' },
);
});
});

test('soft hyphen', function() {
return runDocTest(function(doc) {
test('soft hyphen', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.text(
'Text with soft hyphen - Lorem ipsum dolor sit amet, consectetur adipiscing elit. Lo ip\u00ADsum',
{ align: 'justify' }
{ align: 'justify' },
);
doc.text(
'Text with soft hyphen on the edge - ttttestttestttestttestttestttestttestttestttestttestttes\u00ADtt\u00ADt',
{ align: 'justify' }
{ align: 'justify' },
);
});
});

test('decoration', function() {
return runDocTest(function(doc) {
test('decoration', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('blue').text('Here is a link!', 100, 100, {
link: 'http://google.com/',
underline: true
underline: true,
});
doc.text('Strike', 100, 130, {
strike: true
strike: true,
});
doc.text('Strike', 100, 160, {
underline: true,
strike: true
strike: true,
});
});
});

test('list', function() {
return runDocTest(function(doc) {
test('list', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('#000').list(['One', 'Two', 'Three'], 100, 150);
});
});

test('list with line breaks in items', function() {
return runDocTest(function(doc) {
test('list with line breaks in items', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.list(['Foo\nBar', 'Foo\rBar', 'Foo\r\nBar'], [100, 150]);
});
});

test('list (numbered)', function() {
return runDocTest(function(doc) {
test('list (numbered)', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('#000').list(['One', 'Two', 'Three'], 100, 150, {listType: 'numbered'});
doc
.fillColor('#000')
.list(['One', 'Two', 'Three'], 100, 150, { listType: 'numbered' });
});
});

test('list (lettered)', function() {
return runDocTest(function(doc) {
test('list (lettered)', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('#000').list(['One', 'Two', 'Three'], 100, 150, {listType: 'lettered'});
doc
.fillColor('#000')
.list(['One', 'Two', 'Three'], 100, 150, { listType: 'lettered' });
});
});

test('list with sub-list (unordered)', function() {
return runDocTest(function(doc) {
test('list with sub-list (unordered)', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('#000').list(['One', ['One.One', 'One.Two'], 'Three'], 100, 150);
})
})
doc
.fillColor('#000')
.list(['One', ['One.One', 'One.Two'], 'Three'], 100, 150);
});
});

test('list with sub-list (ordered)', function() {
return runDocTest(function(doc) {
test('list with sub-list (ordered)', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.fillColor('#000').list(['One', ['One.One', 'One.Two'], 'Three'], 100, 150, {listType: 'numbered'});
})
})
doc
.fillColor('#000')
.list(['One', ['One.One', 'One.Two'], 'Three'], 100, 150, {
listType: 'numbered',
});
});
});

test('continued text with OpenType features', function() {
return runDocTest(function(doc) {
test('continued text with OpenType features', function () {
return runDocTest(function (doc) {
doc.font('tests/fonts/Roboto-Regular.ttf');
doc.text('Really simple', 100, 100, {features: ['smcp'], continued: true, lineBreak: false})
doc.text(' text', {features: [], lineBreak: false});
doc.text('Really simple', 100, 100, {
features: ['smcp'],
continued: true,
lineBreak: false,
});
doc.text(' text', { features: [], lineBreak: false });
});
});

@ -159,12 +173,12 @@ describe('text', function() {
// Draw text
.text(text, 300, 300, { rotation: 45 })
// Draw text rect (rotation is negative because of the flipped axis
.rotate(-45, {origin: [300,300]})
.rotate(-45, { origin: [300, 300] })
.rect(300, 300, textBounds.width, textBounds.height)
.stroke("pink")
.stroke('pink')
// Draw origin
.circle(300, 300, 2)
.fill('blue')
.fill('blue');
});
});
});

@ -1,9 +1,9 @@
import { runDocTest } from './helpers';
var tiger = require('../../examples/tiger');

describe('vector', function() {
test('simple shapes', function() {
return runDocTest(function(doc) {
describe('vector', function () {
test('simple shapes', function () {
return runDocTest(function (doc) {
doc
.save()
.moveTo(100, 150)
@ -23,8 +23,8 @@ describe('vector', function() {
});
});

test('complex svg', function() {
return runDocTest(function(doc) {
test('complex svg', function () {
return runDocTest(function (doc) {
var i, len, part;
doc.translate(220, 300);
for (i = 0, len = tiger.length; i < len; i++) {
@ -49,8 +49,8 @@ describe('vector', function() {
});
});

test('svg path', function() {
return runDocTest(function(doc) {
test('svg path', function () {
return runDocTest(function (doc) {
// extracted from https://developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Paths
// lines
doc.path('M10 10 H 90 V 90 H 10 L 10 10');