Mirror of https://github.com/foliojs/pdfkit.git (synced 2026-02-01 16:56:57 +00:00)
Merge pull request #212 from devongovett/streams
API Change Announcement: Streams instead of write/output methods
This commit is contained in:
commit 84375a8220
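In short, a `PDFDocument` is now a readable Node stream that you pipe somewhere and then `end`, instead of an object you save with `write` or collect with `output`. A minimal migration sketch (my own illustration based on the README and demo changes below; the `out.pdf` filename and text content are just examples):

    fs = require 'fs'
    PDFDocument = require 'pdfkit'

    doc = new PDFDocument

    # Before 0.5: build the document, then `doc.write 'out.pdf'` (now deprecated).
    # From 0.5 on: pipe the output into any writable stream up front...
    doc.pipe fs.createWriteStream('out.pdf')

    doc.text 'Hello world'

    # ...and call end() to finalize the PDF and end the stream.
    doc.end()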
@ -55,7 +55,12 @@ Installation uses the [npm](http://npmjs.org/) package manager. Just type the f
## Example

    PDFDocument = require 'pdfkit'

    # Create a document
    doc = new PDFDocument

    # Pipe its output somewhere, like to a file or HTTP response
    doc.pipe fs.createWriteStream('output.pdf')

    # Embed a font, set the font size, and render some text
    doc.font('fonts/PalatinoBold.ttf')
@ -88,8 +93,8 @@ Installation uses the [npm](http://npmjs.org/) package manager. Just type the f
       .underline(100, 100, 160, 27, color: "#0000FF")
       .link(100, 100, 160, 27, 'http://google.com/')

    # Write the PDF file to disk
    doc.write 'output.pdf'
    # Finalize PDF file
    doc.end()

[The PDF output from this example](http://pdfkit.org/demo/out.pdf) (with a few additions) shows the power of PDFKit — producing
complex documents with a very small amount of code. For more, see the `demo` folder and the
BIN demo/out.pdf (binary file not shown)
@ -1,8 +1,10 @@
PDFDocument = require 'pdfkit'
PDFDocument = require '../'
tiger = require './tiger'
fs = require 'fs'

# Create a new PDFDocument
doc = new PDFDocument
doc.pipe fs.createWriteStream('out.pdf')

# Set some meta data
doc.info['Title'] = 'Test Document'
@ -87,4 +89,4 @@ doc.fillColor('#000')
   .font('fonts/Chalkboard.ttc', 'Chalkboard', 16)
   .list(['One', 'Two', 'Three'], 100, 150)

doc.write 'out.pdf'
doc.end()
@ -242,10 +242,11 @@ renderTitlePage = (doc) ->
# render all sections of the guide and write the pdf file
do ->
  doc = new PDFDocument
  doc.pipe fs.createWriteStream('guide.pdf')
  renderTitlePage doc
  render doc, 'getting_started.coffee.md'
  render doc, 'vector.coffee.md'
  render doc, 'text.coffee.md'
  render doc, 'images.coffee.md'
  render doc, 'annotations.coffee.md'
  doc.write 'guide.pdf'
  doc.end()

@ -15,6 +15,20 @@ in your CoffeeScript or JavaScript source file and create an instance of the

    PDFDocument = require 'pdfkit'
    doc = new PDFDocument

`PDFDocument` instances are readable Node streams. They don't get saved anywhere automatically,
but you can call the `pipe` method to send the output of the PDF document to another
writable Node stream as it is being written. When you're done with your document, call
the `end` method to finalize it. Here is an example showing how to pipe to a file or an HTTP response.

    doc.pipe fs.createWriteStream('/path/to/file.pdf') # write to PDF
    doc.pipe res                                        # HTTP response

    # add stuff to PDF here using methods described below...

    doc.end() # finalize the PDF and end the stream

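To make the HTTP case concrete, here is a small sketch that is not part of this commit; it assumes Node's built-in `http` module, an installed `pdfkit` package, and hypothetical placeholder content:

    http = require 'http'
    PDFDocument = require 'pdfkit'

    server = http.createServer (req, res) ->
      res.writeHead 200, 'Content-Type': 'application/pdf'

      doc = new PDFDocument
      doc.pipe res                  # an HTTP response is already a writable stream
      doc.text 'Hello from PDFKit'  # placeholder content
      doc.end()

    server.listen 8080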
The `write` and `output` methods found in PDFKit before version 0.5 are now deprecated.

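Because the old `write(filename, callback)` is deprecated, the way to learn that the file has actually been written is to listen on the destination stream rather than on the document. A small sketch of my own, assuming an `output.pdf` target and Node 0.10+ writable streams, which emit `finish` once all piped data has been flushed:

    fs = require 'fs'
    PDFDocument = require 'pdfkit'

    doc = new PDFDocument
    stream = fs.createWriteStream('output.pdf')
    doc.pipe stream

    doc.text 'Some content'   # placeholder content
    doc.end()

    # fires once the write stream has flushed everything doc.end() produced
    stream.on 'finish', ->
      console.log 'output.pdf is fully written'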
### Adding pages

@ -53,8 +67,6 @@ For example:
    doc.addPage
      margins: { top: 50, bottom: 50, left: 72, right: 72 }

* * *

### Setting document metadata

PDF documents can have various metadata associated with them, such as the
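The hunk below is truncated, so as context here is a brief sketch (my own, not part of this commit) of the two ways metadata appears elsewhere in this diff: assigning to the `info` dictionary as the demo does, or passing an `info` option, which `document.coffee` copies in via `@options.info`:

    PDFDocument = require 'pdfkit'

    # set metadata after creating the document, as in the demo diff above
    doc = new PDFDocument
    doc.info['Title'] = 'Test Document'

    # or pass an info object to the constructor
    doc = new PDFDocument(info: Title: 'Test Document')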
@ -76,15 +88,8 @@ capitalized.
### Adding content

Once you've created a `PDFDocument` instance, you can add content to the
document. Check out the other sections to the left under "Documentation" to
document. Check out the other sections described in this document to
learn about each type of content you can add.

### Saving the document

When you are ready to write the PDF document to a file, just call the `write`
method with a filename. If you want to send the document in response to an
HTTP request, or just need a string representation of the document, just call
the `output` method.

That's the basics! Now let's move on to PDFKit's powerful vector graphics
abilities.

BIN docs/guide.pdf (binary file not shown)
@ -3,14 +3,16 @@ PDFDocument - represents an entire PDF document
|
||||
By Devon Govett
|
||||
###
|
||||
|
||||
stream = require 'stream'
|
||||
fs = require 'fs'
|
||||
PDFObjectStore = require './store'
|
||||
PDFObject = require './object'
|
||||
PDFReference = require './reference'
|
||||
PDFPage = require './page'
|
||||
|
||||
class PDFDocument
|
||||
class PDFDocument extends stream.Readable
|
||||
constructor: (@options = {}) ->
|
||||
super
|
||||
|
||||
# PDF version
|
||||
@version = 1.3
|
||||
|
||||
@ -18,10 +20,17 @@ class PDFDocument
|
||||
@compress = yes
|
||||
|
||||
# The PDF object store
|
||||
@store = new PDFObjectStore
|
||||
|
||||
# A list of pages in this document
|
||||
@pages = []
|
||||
@_offsets = []
|
||||
@_waiting = 0
|
||||
@_ended = false
|
||||
@_offset = 0
|
||||
|
||||
@_root = @ref
|
||||
Type: 'Catalog'
|
||||
Pages: @ref
|
||||
Type: 'Pages'
|
||||
Count: 0
|
||||
Kids: []
|
||||
|
||||
# The current page
|
||||
@page = null
|
||||
@ -33,16 +42,22 @@ class PDFDocument
|
||||
@initText()
|
||||
@initImages()
|
||||
|
||||
# Create the metadata
|
||||
@_info = @ref
|
||||
# Initialize the metadata
|
||||
@info =
|
||||
Producer: 'PDFKit'
|
||||
Creator: 'PDFKit'
|
||||
CreationDate: new Date()
|
||||
|
||||
@info = @_info.data
|
||||
|
||||
if @options.info
|
||||
@info[key] = val for key, val of @options.info
|
||||
delete @options.info
|
||||
for key, val of @options.info
|
||||
@info[key] = val
|
||||
|
||||
# Write the header
|
||||
# PDF version
|
||||
@_write "%PDF-#{@version}"
|
||||
|
||||
# 4 binary chars, as recommended by the spec
|
||||
@_write "%\xFF\xFF\xFF\xFF"
|
||||
|
||||
# Add the first page
|
||||
@addPage()
|
||||
@ -61,12 +76,16 @@ class PDFDocument
|
||||
mixin 'annotations'
|
||||
|
||||
addPage: (options = @options) ->
|
||||
# end the current page if needed
|
||||
@page?.end()
|
||||
|
||||
# create a page object
|
||||
@page = new PDFPage(this, options)
|
||||
|
||||
# add the page to the object store
|
||||
@store.addPage @page
|
||||
@pages.push @page
|
||||
pages = @_root.data.Pages.data
|
||||
pages.Kids.push @page.dictionary
|
||||
pages.Count++
|
||||
|
||||
# reset x and y coordinates
|
||||
@x = @page.margins.left
|
||||
@ -80,90 +99,98 @@ class PDFDocument
|
||||
return this
|
||||
|
||||
ref: (data) ->
|
||||
@store.ref(data)
|
||||
ref = new PDFReference(this, @_offsets.length + 1, data)
|
||||
@_offsets.push null # placeholder for this object's offset once it is finalized
|
||||
@_waiting++
|
||||
return ref
|
||||
|
||||
addContent: (str) ->
|
||||
@page.content.add str
|
||||
return this # make chaining possible
|
||||
_read: ->
|
||||
# do nothing, but this method is required by node
|
||||
|
||||
_write: (data) ->
|
||||
unless Buffer.isBuffer(data)
|
||||
data = new Buffer(data + '\n', 'binary')
|
||||
|
||||
@push data
|
||||
@_offset += data.length
|
||||
|
||||
addContent: (data) ->
|
||||
@page.write data
|
||||
return this
|
||||
|
||||
_refEnd: (ref) ->
|
||||
@_offsets[ref.id - 1] = ref.offset
|
||||
if --@_waiting is 0 and @_ended
|
||||
@_finalize()
|
||||
@_ended = false
|
||||
|
||||
write: (filename, fn) ->
|
||||
@output (out) ->
|
||||
fs.writeFile filename, out, 'binary', fn
|
||||
# print a deprecation warning with a stacktrace
|
||||
err = new Error '
|
||||
PDFDocument#write is deprecated, and will be removed in a future version of PDFKit.
|
||||
Please pipe the document into a Node stream.
|
||||
'
|
||||
|
||||
console.warn err.stack
|
||||
|
||||
@pipe fs.createWriteStream(filename)
|
||||
@end()
|
||||
@once 'end', fn
|
||||
|
||||
output: (fn) ->
|
||||
@finalize =>
|
||||
out = []
|
||||
@generateHeader out
|
||||
@generateBody out, =>
|
||||
@generateXRef out
|
||||
@generateTrailer out
|
||||
# more difficult to support this. It would involve concatenating all the buffers together
|
||||
throw new Error '
|
||||
PDFDocument#output is deprecated, and has been removed from PDFKit.
|
||||
Please pipe the document into a Node stream.
|
||||
'
|
||||
|
||||
ret = []
|
||||
for k in out
|
||||
ret.push(k + '\n')
|
||||
|
||||
fn new Buffer(ret.join(''),'binary')
|
||||
end: ->
|
||||
@page.end()
|
||||
|
||||
finalize: (fn) ->
|
||||
# convert strings in the info dictionary to literals
|
||||
for key, val of @info when typeof val is 'string'
|
||||
@info[key] = PDFObject.s val, true
|
||||
@_info = @ref()
|
||||
for key, val of @info
|
||||
if typeof val is 'string'
|
||||
val = PDFObject.s val, true
|
||||
|
||||
@_info.data[key] = val
|
||||
|
||||
@_info.end()
|
||||
|
||||
# embed the subsetted fonts
|
||||
@embedFonts =>
|
||||
# embed the images
|
||||
@embedImages =>
|
||||
done = 0
|
||||
cb = => fn() if ++done is @pages.length
|
||||
|
||||
# finalize each page
|
||||
for page in @pages
|
||||
page.finalize(cb)
|
||||
for name, font of @_fontFamilies
|
||||
font.embed()
|
||||
|
||||
@_root.end()
|
||||
@_root.data.Pages.end()
|
||||
|
||||
generateHeader: (out) ->
|
||||
# PDF version
|
||||
out.push "%PDF-#{@version}"
|
||||
|
||||
# 4 binary chars, as recommended by the spec
|
||||
out.push "%\xFF\xFF\xFF\xFF\n"
|
||||
return out
|
||||
if @_waiting is 0
|
||||
@_finalize()
|
||||
else
|
||||
@_ended = true
|
||||
|
||||
generateBody: (out, fn) ->
|
||||
offset = out.join('\n').length + 1
|
||||
_finalize: (fn) ->
|
||||
# generate xref
|
||||
xRefOffset = @_offset
|
||||
@_write "xref"
|
||||
@_write "0 #{@_offsets.length + 1}"
|
||||
@_write "0000000000 65535 f "
|
||||
|
||||
refs = (ref for id, ref of @store.objects)
|
||||
do proceed = =>
|
||||
if ref = refs.shift()
|
||||
ref.object @compress, (object) ->
|
||||
ref.offset = offset
|
||||
out.push object
|
||||
offset += object.length + 1 # plus one for newline
|
||||
proceed()
|
||||
else
|
||||
@xref_offset = offset
|
||||
fn()
|
||||
|
||||
generateXRef: (out) ->
|
||||
len = @store.length + 1
|
||||
out.push "xref"
|
||||
out.push "0 #{len}"
|
||||
out.push "0000000000 65535 f "
|
||||
|
||||
for id, ref of @store.objects
|
||||
offset = ('0000000000' + ref.offset).slice(-10)
|
||||
out.push offset + ' 00000 n '
|
||||
|
||||
generateTrailer: (out) ->
|
||||
trailer = PDFObject.convert
|
||||
Size: @store.length + 1
|
||||
Root: @store.root
|
||||
for offset in @_offsets
|
||||
offset = ('0000000000' + offset).slice(-10)
|
||||
@_write offset + ' 00000 n '
|
||||
|
||||
# trailer
|
||||
@_write 'trailer'
|
||||
@_write PDFObject.convert
|
||||
Size: @_offsets.length
|
||||
Root: @_root
|
||||
Info: @_info
|
||||
|
||||
@_write 'startxref'
|
||||
@_write "#{xRefOffset}"
|
||||
@_write '%%EOF'
|
||||
|
||||
out.push 'trailer'
|
||||
out.push trailer
|
||||
out.push 'startxref'
|
||||
out.push @xref_offset
|
||||
out.push '%%EOF'
|
||||
# end the stream
|
||||
@push null
|
||||
|
||||
toString: ->
|
||||
"[object PDFDocument]"
|
||||
|
||||
107 lib/font.coffee
@ -10,10 +10,12 @@ zlib = require 'zlib'
|
||||
|
||||
class PDFFont
|
||||
constructor: (@document, @filename, @family, @id) ->
|
||||
@ref = @document.ref()
|
||||
|
||||
if @filename in @_standardFonts
|
||||
@isAFM = true
|
||||
@font = AFMFont.open __dirname + "/font/data/#{@filename}.afm"
|
||||
@registerStandard()
|
||||
@registerAFM()
|
||||
|
||||
else if /\.(ttf|ttc)$/i.test @filename
|
||||
@font = TTFFont.open @filename, @family
|
||||
@ -31,9 +33,11 @@ class PDFFont
|
||||
use: (characters) ->
|
||||
@subset?.use characters
|
||||
|
||||
embed: (fn) ->
|
||||
return fn() if @isAFM
|
||||
@embedTTF fn
|
||||
embed: ->
|
||||
if @isAFM
|
||||
@embedAFM()
|
||||
else
|
||||
@embedTTF()
|
||||
|
||||
encode: (text) ->
|
||||
if @isAFM
|
||||
@ -74,59 +78,49 @@ class PDFFont
|
||||
@flags |= 1 << 5 # assume the font is nonsymbolic...
|
||||
|
||||
throw new Error 'No unicode cmap for font' if not @font.cmap.unicode
|
||||
|
||||
embedTTF: ->
|
||||
data = @subset.encode()
|
||||
fontfile = @document.ref()
|
||||
fontfile.write data
|
||||
|
||||
# Create a placeholder reference to be filled in embedTTF.
|
||||
@ref = @document.ref
|
||||
fontfile.data.Length1 = fontfile.uncompressedLength
|
||||
fontfile.end()
|
||||
|
||||
descriptor = @document.ref
|
||||
Type: 'FontDescriptor'
|
||||
FontName: @subset.postscriptName
|
||||
FontFile2: fontfile
|
||||
FontBBox: @bbox
|
||||
Flags: @flags
|
||||
StemV: @stemV
|
||||
ItalicAngle: @italicAngle
|
||||
Ascent: @ascender
|
||||
Descent: @decender
|
||||
CapHeight: @capHeight
|
||||
XHeight: @xHeight
|
||||
|
||||
descriptor.end()
|
||||
|
||||
firstChar = +Object.keys(@subset.cmap)[0]
|
||||
charWidths = for code, glyph of @subset.cmap
|
||||
Math.round @font.widthOfGlyph(glyph)
|
||||
|
||||
cmap = @document.ref()
|
||||
cmap.end toUnicodeCmap(@subset.subset)
|
||||
|
||||
@ref.data =
|
||||
Type: 'Font'
|
||||
BaseFont: @subset.postscriptName
|
||||
Subtype: 'TrueType'
|
||||
|
||||
embedTTF: (fn) ->
|
||||
data = @subset.encode()
|
||||
zlib.deflate data, (err, compressedData) =>
|
||||
throw err if err
|
||||
FontDescriptor: descriptor
|
||||
FirstChar: firstChar
|
||||
LastChar: firstChar + charWidths.length - 1
|
||||
Widths: charWidths
|
||||
Encoding: 'MacRomanEncoding'
|
||||
ToUnicode: cmap
|
||||
|
||||
@fontfile = @document.ref
|
||||
Length: compressedData.length
|
||||
Length1: data.length
|
||||
Filter: 'FlateDecode'
|
||||
|
||||
@fontfile.add compressedData
|
||||
|
||||
@descriptor = @document.ref
|
||||
Type: 'FontDescriptor'
|
||||
FontName: @subset.postscriptName
|
||||
FontFile2: @fontfile
|
||||
FontBBox: @bbox
|
||||
Flags: @flags
|
||||
StemV: @stemV
|
||||
ItalicAngle: @italicAngle
|
||||
Ascent: @ascender
|
||||
Descent: @decender
|
||||
CapHeight: @capHeight
|
||||
XHeight: @xHeight
|
||||
|
||||
firstChar = +Object.keys(@subset.cmap)[0]
|
||||
charWidths = for code, glyph of @subset.cmap
|
||||
Math.round @font.widthOfGlyph(glyph)
|
||||
|
||||
cmap = @document.ref()
|
||||
cmap.add toUnicodeCmap(@subset.subset)
|
||||
|
||||
ref =
|
||||
Type: 'Font'
|
||||
BaseFont: @subset.postscriptName
|
||||
Subtype: 'TrueType'
|
||||
FontDescriptor: @descriptor
|
||||
FirstChar: firstChar
|
||||
LastChar: firstChar + charWidths.length - 1
|
||||
Widths: @document.ref charWidths
|
||||
Encoding: 'MacRomanEncoding'
|
||||
ToUnicode: cmap
|
||||
|
||||
for key, val of ref
|
||||
@ref.data[key] = val
|
||||
|
||||
cmap.finalize(@document.compress, fn) # compress it
|
||||
@ref.end()
|
||||
|
||||
toUnicodeCmap = (map) ->
|
||||
unicodeMap = '''
|
||||
@ -164,15 +158,18 @@ class PDFFont
|
||||
end
|
||||
'''
|
||||
|
||||
registerStandard: ->
|
||||
registerAFM: ->
|
||||
{@ascender,@decender,@bbox,@lineGap} = @font
|
||||
|
||||
@ref = @document.ref
|
||||
embedAFM: ->
|
||||
@ref.data =
|
||||
Type: 'Font'
|
||||
BaseFont: @filename
|
||||
Subtype: 'Type1'
|
||||
Encoding: 'WinAnsiEncoding'
|
||||
|
||||
@ref.end()
|
||||
|
||||
_standardFonts: [
|
||||
"Courier"
|
||||
"Courier-Bold"
|
||||
|
||||
@ -3,6 +3,7 @@ class PDFGradient
|
||||
@stops = []
|
||||
@embedded = no
|
||||
@transform = [1, 0, 0, 1, 0, 0]
|
||||
@_colorSpace = 'DeviceRGB'
|
||||
|
||||
stop: (pos, color, opacity = 1) ->
|
||||
opacity = Math.max(0, Math.min(1, opacity))
|
||||
@ -35,6 +36,7 @@ class PDFGradient
|
||||
N: 1
|
||||
|
||||
stops.push fn
|
||||
fn.end()
|
||||
|
||||
# if there are only two stops, we don't need a stitching function
|
||||
if stops.length is 1
|
||||
@ -47,6 +49,8 @@ class PDFGradient
|
||||
Bounds: bounds
|
||||
Encode: encode
|
||||
|
||||
fn.end()
|
||||
|
||||
@id = 'Sh' + (++@doc._gradCount)
|
||||
|
||||
# apply gradient transform to existing document ctm
|
||||
@ -60,26 +64,40 @@ class PDFGradient
|
||||
m[4] = m0 * dx + m2 * dy + m4
|
||||
m[5] = m1 * dx + m3 * dy + m5
|
||||
|
||||
shader = @shader fn
|
||||
shader.end()
|
||||
|
||||
pattern = @doc.ref
|
||||
Type: 'Pattern'
|
||||
PatternType: 2
|
||||
Shading: @shader fn
|
||||
Shading: shader
|
||||
Matrix: (+v.toFixed(5) for v in m)
|
||||
|
||||
@doc.page.patterns[@id] = pattern
|
||||
pattern.end()
|
||||
|
||||
if (@stops.some (stop) -> stop[2] < 1)
|
||||
grad = @opacityGradient()
|
||||
grad._colorSpace = 'DeviceGray'
|
||||
|
||||
for stop in @stops
|
||||
grad.stop stop[0], [stop[2]]
|
||||
|
||||
grad = grad.embed()
|
||||
grad.data.Shading.data.ColorSpace = 'DeviceGray'
|
||||
|
||||
group = @doc.ref
|
||||
Type: 'Group'
|
||||
S: 'Transparency'
|
||||
CS: 'DeviceGray'
|
||||
|
||||
group.end()
|
||||
|
||||
resources = @doc.ref
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI']
|
||||
Shading:
|
||||
Sh1: grad.data.Shading
|
||||
|
||||
resources.end()
|
||||
|
||||
form = @doc.ref
|
||||
Type: 'XObject'
|
||||
@ -87,17 +105,16 @@ class PDFGradient
|
||||
FormType: 1
|
||||
BBox: [0, 0, @doc.page.width, @doc.page.height]
|
||||
Group: group
|
||||
Resources: @doc.ref
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI']
|
||||
Shading:
|
||||
Sh1: grad.data.Shading
|
||||
Resources: resources
|
||||
|
||||
form.add "/Sh1 sh"
|
||||
form.end "/Sh1 sh"
|
||||
|
||||
sMask = @doc.ref
|
||||
Type: 'Mask'
|
||||
S: 'Luminosity'
|
||||
G: form
|
||||
|
||||
sMask.end()
|
||||
|
||||
gstate = @doc.ref
|
||||
Type: 'ExtGState'
|
||||
@ -107,6 +124,7 @@ class PDFGradient
|
||||
name = "Gs#{@opacity_id}"
|
||||
|
||||
@doc.page.ext_gstates[name] = gstate
|
||||
gstate.end()
|
||||
|
||||
return pattern
|
||||
|
||||
@ -115,7 +133,6 @@ class PDFGradient
|
||||
@doc.addContent "/#{@id} #{op}"
|
||||
|
||||
if @opacity_id
|
||||
@doc.save()
|
||||
@doc.addContent "/Gs#{@opacity_id} gs"
|
||||
@doc._sMasked = true
|
||||
|
||||
@ -126,7 +143,7 @@ class PDFLinearGradient extends PDFGradient
|
||||
shader: (fn) ->
|
||||
@doc.ref
|
||||
ShadingType: 2
|
||||
ColorSpace: 'DeviceRGB'
|
||||
ColorSpace: @_colorSpace
|
||||
Coords: [@x1, @y1, @x2, @y2]
|
||||
Function: fn
|
||||
Extend: [true, true]
|
||||
@ -141,7 +158,7 @@ class PDFRadialGradient extends PDFGradient
|
||||
shader: (fn) ->
|
||||
@doc.ref
|
||||
ShadingType: 3
|
||||
ColorSpace: 'DeviceRGB'
|
||||
ColorSpace: @_colorSpace
|
||||
Coords: [@x1, @y1, @r1, @x2, @y2, @r2]
|
||||
Function: fn
|
||||
Extend: [true, true]
|
||||
|
||||
@ -9,25 +9,18 @@ JPEG = require './image/jpeg'
|
||||
PNG = require './image/png'
|
||||
|
||||
class PDFImage
|
||||
@open: (filenameOrBuffer) ->
|
||||
if typeof filenameOrBuffer is 'object' and filenameOrBuffer instanceof Buffer
|
||||
@contents = filenameOrBuffer
|
||||
@open: (src, label) ->
|
||||
if Buffer.isBuffer(src)
|
||||
data = src
|
||||
else
|
||||
@contents = fs.readFileSync filenameOrBuffer
|
||||
return unless @contents
|
||||
|
||||
@data = new Data @contents
|
||||
@filter = null
|
||||
data = fs.readFileSync src
|
||||
return unless data
|
||||
|
||||
# load info
|
||||
data = @data
|
||||
firstByte = data.byteAt(0)
|
||||
|
||||
if firstByte is 0xFF and data.byteAt(1) is 0xD8
|
||||
return new JPEG(data)
|
||||
if data[0] is 0xff and data[1] is 0xd8
|
||||
return new JPEG(data, label)
|
||||
|
||||
else if firstByte is 0x89 and data.stringAt(1, 3) is "PNG"
|
||||
return new PNG(data)
|
||||
else if data[0] is 0x89 and data.toString('ascii', 1, 4) is 'PNG'
|
||||
return new PNG(data, label)
|
||||
|
||||
else
|
||||
throw new Error 'Unknown image format.'
|
||||
|
||||
@ -1,39 +1,42 @@
|
||||
fs = require 'fs'
|
||||
Data = '../data'
|
||||
setImmediate = global.setImmediate ? process.nextTick # backfill for node <0.10
|
||||
|
||||
class JPEG
|
||||
constructor: (@data) ->
|
||||
len = data.length
|
||||
|
||||
if data.readUInt16() isnt 0xFFD8
|
||||
MARKERS = [0xFFC0, 0xFFC1, 0xFFC2, 0xFFC3, 0xFFC5, 0xFFC6, 0xFFC7,
|
||||
0xFFC8, 0xFFC9, 0xFFCA, 0xFFCB, 0xFFCC, 0xFFCD, 0xFFCE, 0xFFCF]
|
||||
|
||||
constructor: (@data, @label) ->
|
||||
if data.readUInt16BE(0) isnt 0xFFD8
|
||||
throw "SOI not found in JPEG"
|
||||
|
||||
pos = 2
|
||||
while pos < data.length
|
||||
marker = data.readUInt16BE(pos)
|
||||
pos += 2
|
||||
break if marker in MARKERS
|
||||
pos += data.readUInt16BE(pos)
|
||||
|
||||
markers = [0xFFC0, 0xFFC1, 0xFFC2, 0xFFC3, 0xFFC5, 0xFFC6, 0xFFC7,
|
||||
0xFFC8, 0xFFC9, 0xFFCA, 0xFFCB, 0xFFCC, 0xFFCD, 0xFFCE, 0xFFCF]
|
||||
|
||||
while data.pos < len
|
||||
marker = data.readUInt16()
|
||||
break if marker in markers
|
||||
data.pos += data.readUInt16()
|
||||
|
||||
throw "Invalid JPEG." unless marker in markers
|
||||
data.pos += 2
|
||||
throw "Invalid JPEG." unless marker in MARKERS
|
||||
pos += 2
|
||||
|
||||
@bits = data.readByte()
|
||||
@height = data.readShort()
|
||||
@width = data.readShort()
|
||||
@bits = data[pos++]
|
||||
@height = data.readUInt16BE(pos)
|
||||
pos += 2
|
||||
|
||||
channels = data.readByte()
|
||||
@width = data.readUInt16BE(pos)
|
||||
pos += 2
|
||||
|
||||
channels = data[pos++]
|
||||
@colorSpace = switch channels
|
||||
when 1 then 'DeviceGray'
|
||||
when 3 then 'DeviceRGB'
|
||||
when 4 then 'DeviceCMYK'
|
||||
|
||||
@imgData = @data
|
||||
@obj = null
|
||||
|
||||
embed: (document) ->
|
||||
return if @obj
|
||||
|
||||
object: (document, fn) ->
|
||||
obj = document.ref
|
||||
@obj = document.ref
|
||||
Type: 'XObject'
|
||||
Subtype: 'Image'
|
||||
BitsPerComponent: @bits
|
||||
@ -47,9 +50,11 @@ class JPEG
|
||||
# min and max values from the default, we invert the colors. See
|
||||
# section 4.8.4 of the spec.
|
||||
if @colorSpace is 'DeviceCMYK'
|
||||
obj.data['Decode'] = [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0]
|
||||
@obj.data['Decode'] = [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0]
|
||||
|
||||
obj.add @data.data
|
||||
setImmediate -> fn(obj)
|
||||
@obj.end @data
|
||||
|
||||
# free memory
|
||||
@data = null
|
||||
|
||||
module.exports = JPEG
|
||||
|
||||
@ -1,31 +1,18 @@
|
||||
zlib = require 'zlib'
|
||||
PNG = require 'png-js'
|
||||
setImmediate = global.setImmediate ? process.nextTick # backfill for node <0.10
|
||||
|
||||
class PNGImage
|
||||
constructor: (data) ->
|
||||
@image = new PNG(data.data)
|
||||
constructor: (data, @label) ->
|
||||
@image = new PNG(data)
|
||||
@width = @image.width
|
||||
@height = @image.height
|
||||
@imgData = @image.imgData
|
||||
@obj = null
|
||||
|
||||
object: (document, fn) ->
|
||||
# get the async stuff out of the way first
|
||||
if not @alphaChannel
|
||||
if @image.transparency.indexed
|
||||
# Create a transparency SMask for the image based on the data
|
||||
# in the PLTE and tRNS sections. See below for details on SMasks.
|
||||
@loadIndexedAlphaChannel => @object document, fn
|
||||
return
|
||||
|
||||
else if @image.hasAlphaChannel
|
||||
# For PNG color types 4 and 6, the transparency data is stored as an alpha
|
||||
# channel mixed in with the main image data. Separate this data out into an
|
||||
# SMask object and store it separately in the PDF.
|
||||
@splitAlphaChannel => @object document, fn
|
||||
return
|
||||
|
||||
obj = document.ref
|
||||
embed: (@document) ->
|
||||
return if @obj
|
||||
|
||||
@obj = document.ref
|
||||
Type: 'XObject'
|
||||
Subtype: 'Image'
|
||||
BitsPerComponent: @image.bits
|
||||
@ -33,25 +20,28 @@ class PNGImage
|
||||
Height: @height
|
||||
Length: @imgData.length
|
||||
Filter: 'FlateDecode'
|
||||
|
||||
|
||||
unless @image.hasAlphaChannel
|
||||
obj.data['DecodeParms'] = document.ref
|
||||
params = document.ref
|
||||
Predictor: 15
|
||||
Colors: @image.colors
|
||||
BitsPerComponent: @image.bits
|
||||
Columns: @width
|
||||
|
||||
@obj.data['DecodeParms'] = params
|
||||
params.end()
|
||||
|
||||
if @image.palette.length is 0
|
||||
obj.data['ColorSpace'] = @image.colorSpace
|
||||
@obj.data['ColorSpace'] = @image.colorSpace
|
||||
else
|
||||
# embed the color palette in the PDF as an object stream
|
||||
palette = document.ref
|
||||
Length: @image.palette.length
|
||||
|
||||
palette.add new Buffer(@image.palette)
|
||||
palette.end @image.palette
|
||||
|
||||
# build the color space array for the image
|
||||
obj.data['ColorSpace'] = ['Indexed', 'DeviceRGB', (@image.palette.length / 3) - 1, palette]
|
||||
@obj.data['ColorSpace'] = ['Indexed', 'DeviceRGB', (@image.palette.length / 3) - 1, palette]
|
||||
|
||||
# For PNG color types 0, 2 and 3, the transparency data is stored in
|
||||
# a dedicated PNG chunk.
|
||||
@ -59,7 +49,7 @@ class PNGImage
|
||||
# Use Color Key Masking (spec section 4.8.5)
|
||||
# An array with N elements, where N is two times the number of color components.
|
||||
val = @image.transparency.greyscale
|
||||
obj.data['Mask'] = [val, val]
|
||||
@obj.data['Mask'] = [val, val]
|
||||
|
||||
else if @image.transparency.rgb
|
||||
# Use Color Key Masking (spec section 4.8.5)
|
||||
@ -69,10 +59,25 @@ class PNGImage
|
||||
for x in rgb
|
||||
mask.push x, x
|
||||
|
||||
obj.data['Mask'] = mask
|
||||
@obj.data['Mask'] = mask
|
||||
|
||||
else if @image.transparency.indexed
|
||||
# Create a transparency SMask for the image based on the data
|
||||
# in the PLTE and tRNS sections. See below for details on SMasks.
|
||||
@loadIndexedAlphaChannel()
|
||||
|
||||
else if @image.hasAlphaChannel
|
||||
# For PNG color types 4 and 6, the transparency data is stored as an alpha
|
||||
# channel mixed in with the main image data. Separate this data out into an
|
||||
# SMask object and store it separately in the PDF.
|
||||
@splitAlphaChannel()
|
||||
|
||||
else
|
||||
@finalize()
|
||||
|
||||
finalize: ->
|
||||
if @alphaChannel
|
||||
sMask = document.ref
|
||||
sMask = @document.ref
|
||||
Type: 'XObject'
|
||||
Subtype: 'Image'
|
||||
Height: @height
|
||||
@ -83,14 +88,17 @@ class PNGImage
|
||||
ColorSpace: 'DeviceGray'
|
||||
Decode: [0, 1]
|
||||
|
||||
sMask.add @alphaChannel
|
||||
obj.data['SMask'] = sMask
|
||||
sMask.end @alphaChannel
|
||||
@obj.data['SMask'] = sMask
|
||||
|
||||
# add the actual image data
|
||||
obj.add @imgData
|
||||
setImmediate -> fn(obj)
|
||||
# add the actual image data
|
||||
@obj.end @imgData
|
||||
|
||||
splitAlphaChannel: (fn) ->
|
||||
# free memory
|
||||
@image = null
|
||||
@imgData = null
|
||||
|
||||
splitAlphaChannel: ->
|
||||
@image.decodePixels (pixels) =>
|
||||
colorByteSize = @image.colors * @image.bits / 8
|
||||
pixelCount = @width * @height
|
||||
@ -108,11 +116,11 @@ class PNGImage
|
||||
done = 0
|
||||
zlib.deflate imgData, (err, @imgData) =>
|
||||
throw err if err
|
||||
fn() if ++done is 2
|
||||
@finalize() if ++done is 2
|
||||
|
||||
zlib.deflate alphaChannel, (err, @alphaChannel) =>
|
||||
throw err if err
|
||||
fn() if ++done is 2
|
||||
@finalize() if ++done is 2
|
||||
|
||||
loadIndexedAlphaChannel: (fn) ->
|
||||
transparency = @image.transparency.indexed
|
||||
@ -125,6 +133,6 @@ class PNGImage
|
||||
|
||||
zlib.deflate alphaChannel, (err, @alphaChannel) =>
|
||||
throw err if err
|
||||
fn()
|
||||
|
||||
@finalize()
|
||||
|
||||
module.exports = PNGImage
|
||||
|
||||
@ -15,7 +15,9 @@ module.exports =
|
||||
for key, val of options
|
||||
options[key[0].toUpperCase() + key.slice(1)] = val
|
||||
|
||||
@page.annotations.push @ref options
|
||||
ref = @ref options
|
||||
@page.annotations.push ref
|
||||
ref.end()
|
||||
return this
|
||||
|
||||
note: (x, y, w, h, contents, options = {}) ->
|
||||
@ -31,6 +33,7 @@ module.exports =
|
||||
S: 'URI'
|
||||
URI: PDFObject.s url
|
||||
|
||||
options.A.end()
|
||||
@annotate x, y, w, h, options
|
||||
|
||||
_markup: (x, y, w, h, options = {}) ->
|
||||
|
||||
@ -43,9 +43,11 @@ module.exports =
|
||||
Type: 'ExtGState'
|
||||
SMask: 'None'
|
||||
|
||||
gstate.end()
|
||||
name = "Gs#{++@_opacityCount}"
|
||||
@page.ext_gstates[name] = gstate
|
||||
@addContent "/#{name} gs"
|
||||
@_sMasked = false
|
||||
|
||||
op = if stroke then 'SCN' else 'scn'
|
||||
|
||||
@ -108,6 +110,7 @@ module.exports =
|
||||
dictionary.CA = strokeOpacity if strokeOpacity?
|
||||
|
||||
dictionary = @ref dictionary
|
||||
dictionary.end()
|
||||
id = ++@_opacityCount
|
||||
name = "Gs#{id}"
|
||||
@_opacityRegistry[key] = [dictionary, name]
|
||||
|
||||
@ -45,11 +45,6 @@ module.exports =
|
||||
registerFont: (name, path, family) ->
|
||||
@_registeredFonts[name] =
|
||||
filename: path
|
||||
family: family
|
||||
return this
|
||||
family: family
|
||||
|
||||
embedFonts: (fn) ->
|
||||
fonts = (font for family, font of @_fontFamilies)
|
||||
do proceed = =>
|
||||
return fn() if fonts.length is 0
|
||||
fonts.shift().embed(proceed)
|
||||
return this
|
||||
|
||||
@ -13,14 +13,13 @@ module.exports =
|
||||
x = x ? options.x ? @x
|
||||
y = y ? options.y ? @y
|
||||
|
||||
if @_imageRegistry[src]
|
||||
[image, label, pages] = @_imageRegistry[src]
|
||||
pages.push @page unless @page in pages
|
||||
|
||||
else
|
||||
image = PDFImage.open(src)
|
||||
label = "I" + (++@_imageCount)
|
||||
@_imageRegistry[src] = [image, label, [@page]]
|
||||
image = @_imageRegistry[src]
|
||||
if not image
|
||||
image = PDFImage.open src, 'I' + (++@_imageCount)
|
||||
image.embed this
|
||||
@_imageRegistry[src] = image unless Buffer.isBuffer(src)
|
||||
|
||||
@page.xobjects[image.label] ?= image.obj
|
||||
|
||||
w = options.width or image.width
|
||||
h = options.height or image.height
|
||||
@ -60,20 +59,7 @@ module.exports =
|
||||
|
||||
@save()
|
||||
@transform w, 0, 0, -h, x, y + h
|
||||
@addContent "/#{label} Do"
|
||||
@addContent "/#{image.label} Do"
|
||||
@restore()
|
||||
|
||||
return this
|
||||
|
||||
embedImages: (fn) ->
|
||||
images = (item for src, item of @_imageRegistry)
|
||||
do proceed = =>
|
||||
if images.length
|
||||
[image, label, pages] = images.shift()
|
||||
image.object this, (obj) ->
|
||||
for page in pages
|
||||
page.xobjects[label] ?= obj
|
||||
|
||||
proceed()
|
||||
else
|
||||
fn()
|
||||
@ -8,7 +8,7 @@ class PDFPage
|
||||
@size = options.size or 'letter'
|
||||
@layout = options.layout or 'portrait'
|
||||
|
||||
# if margin was passed as a single number
|
||||
# process margins
|
||||
if typeof options.margin is 'number'
|
||||
@margins =
|
||||
top: options.margin
|
||||
@ -19,47 +19,50 @@ class PDFPage
|
||||
# default to 1 inch margins
|
||||
else
|
||||
@margins = options.margins or DEFAULT_MARGINS
|
||||
|
||||
|
||||
# calculate page dimensions
|
||||
dimensions = if Array.isArray(@size) then @size else SIZES[@size.toUpperCase()]
|
||||
@width = dimensions[if @layout is 'portrait' then 0 else 1]
|
||||
@height = dimensions[if @layout is 'portrait' then 1 else 0]
|
||||
|
||||
# A reference to the content of this page
|
||||
@content = @document.ref()
|
||||
|
||||
# The page dictionary
|
||||
@dictionary = @document.ref
|
||||
Type: 'Page'
|
||||
Parent: @document.store.pages
|
||||
MediaBox: [0, 0, @width, @height]
|
||||
Contents: @content
|
||||
@content = @document.ref()
|
||||
|
||||
# The resource dictionary
|
||||
@dictionary.data['Resources'] = @document.ref
|
||||
# Initialize the Font, XObject, and ExtGState dictionaries
|
||||
@resources = @document.ref
|
||||
ProcSet: ['PDF', 'Text', 'ImageB', 'ImageC', 'ImageI']
|
||||
|
||||
# Initialize the Font, XObject, and ExtGState dictionaries
|
||||
@resources = @dictionary.data['Resources'].data
|
||||
|
||||
# Lazily create these dictionaries
|
||||
Object.defineProperties this,
|
||||
fonts:
|
||||
get: => @resources['Font'] ?= {}
|
||||
get: => @resources.data.Font ?= {}
|
||||
xobjects:
|
||||
get: => @resources['XObject'] ?= {}
|
||||
get: => @resources.data.XObject ?= {}
|
||||
ext_gstates:
|
||||
get: => @resources['ExtGState'] ?= {}
|
||||
get: => @resources.data.ExtGState ?= {}
|
||||
patterns:
|
||||
get: => @resources['Pattern'] ?= {}
|
||||
get: => @resources.data.Pattern ?= {}
|
||||
annotations:
|
||||
get: => @dictionary.data['Annots'] ?= []
|
||||
|
||||
get: => @dictionary.data.Annots ?= []
|
||||
|
||||
# The page dictionary
|
||||
@dictionary = @document.ref
|
||||
Type: 'Page'
|
||||
Parent: @document._root.data.Pages
|
||||
MediaBox: [0, 0, @width, @height]
|
||||
Contents: @content
|
||||
Resources: @resources
|
||||
|
||||
maxY: ->
|
||||
@height - @margins.bottom
|
||||
|
||||
finalize: (fn) ->
|
||||
@content.finalize(@document.compress, fn)
|
||||
|
||||
|
||||
write: (chunk) ->
|
||||
@content.write chunk
|
||||
|
||||
end: ->
|
||||
@dictionary.end()
|
||||
@resources.end()
|
||||
@content.end()
|
||||
|
||||
DEFAULT_MARGINS =
|
||||
top: 72
|
||||
left: 72
|
||||
@ -117,5 +120,5 @@ class PDFPage
|
||||
LEGAL: [612.00, 1008.00]
|
||||
LETTER: [612.00, 792.00]
|
||||
TABLOID: [792.00, 1224.00]
|
||||
|
||||
|
||||
module.exports = PDFPage
|
||||
@ -4,57 +4,67 @@ By Devon Govett
|
||||
###
|
||||
|
||||
zlib = require 'zlib'
|
||||
setImmediate = global.setImmediate ? process.nextTick # backfill for node < 0.10
|
||||
|
||||
class PDFReference
|
||||
constructor: (@id, @data = {}) ->
|
||||
constructor: (@document, @id, @data = {}) ->
|
||||
@gen = 0
|
||||
@stream = null
|
||||
@finalizedStream = null
|
||||
@deflate = null
|
||||
@compress = @document.compress and not @data.Filter
|
||||
@uncompressedLength = 0
|
||||
@chunks = []
|
||||
|
||||
object: (compress, fn) ->
|
||||
unless @finalizedStream?
|
||||
return @finalize compress, => @object compress, fn
|
||||
initDeflate: ->
|
||||
@data.Filter = 'FlateDecode'
|
||||
|
||||
out = ["#{@id} #{@gen} obj"]
|
||||
out.push PDFObject.convert(@data)
|
||||
@deflate = zlib.createDeflate()
|
||||
@deflate.on 'data', (chunk) =>
|
||||
@chunks.push chunk
|
||||
@data.Length += chunk.length
|
||||
|
||||
@deflate.on 'end', @finalize
|
||||
|
||||
write: (chunk) ->
|
||||
unless Buffer.isBuffer(chunk)
|
||||
chunk = new Buffer(chunk + '\n', 'binary')
|
||||
|
||||
@uncompressedLength += chunk.length
|
||||
@data.Length ?= 0
|
||||
|
||||
if @stream
|
||||
out.push "stream"
|
||||
out.push @finalizedStream
|
||||
out.push "endstream"
|
||||
|
||||
out.push "endobj"
|
||||
fn out.join '\n'
|
||||
|
||||
add: (s) ->
|
||||
@stream ?= []
|
||||
@stream.push if Buffer.isBuffer(s) then s.toString('binary') else s
|
||||
|
||||
finalize: (compress = false, fn) ->
|
||||
# cache the finalized stream
|
||||
if @stream
|
||||
data = @stream.join '\n'
|
||||
if compress and not @data.Filter
|
||||
# create a byte array instead of passing a string to the Buffer
|
||||
# fixes a weird unicode bug.
|
||||
data = new Buffer(data.charCodeAt(i) for i in [0...data.length])
|
||||
zlib.deflate data, (err, compressedData) =>
|
||||
throw err if err
|
||||
@finalizedStream = compressedData.toString 'binary'
|
||||
@data.Filter = 'FlateDecode'
|
||||
@data.Length = @finalizedStream.length
|
||||
fn()
|
||||
else
|
||||
@finalizedStream = data
|
||||
@data.Length = @finalizedStream.length
|
||||
setImmediate fn
|
||||
if @compress
|
||||
@initDeflate() if not @deflate
|
||||
@deflate.write chunk
|
||||
else
|
||||
@finalizedStream = ''
|
||||
setImmediate fn
|
||||
@chunks.push chunk
|
||||
@data.Length += chunk.length
|
||||
|
||||
end: (chunk) ->
|
||||
if typeof chunk is 'string' or Buffer.isBuffer(chunk)
|
||||
@write chunk
|
||||
|
||||
if @deflate
|
||||
@deflate.end()
|
||||
else
|
||||
@finalize()
|
||||
|
||||
finalize: =>
|
||||
@offset = @document._offset
|
||||
|
||||
@document._write "#{@id} #{@gen} obj"
|
||||
@document._write PDFObject.convert(@data)
|
||||
|
||||
if @chunks.length
|
||||
@document._write 'stream'
|
||||
for chunk in @chunks
|
||||
@document._write chunk
|
||||
|
||||
@chunks.length = 0 # free up memory
|
||||
@document._write '\nendstream'
|
||||
|
||||
@document._write 'endobj'
|
||||
@document._refEnd(this)
|
||||
|
||||
toString: ->
|
||||
"#{@id} #{@gen} R"
|
||||
|
||||
return "#{@id} #{@gen} R"
|
||||
|
||||
module.exports = PDFReference
|
||||
PDFObject = require './object'
|
||||
|
||||
@ -1,34 +0,0 @@
|
||||
###
|
||||
PDFObjectStore - stores the object hierarchy for the PDF document
|
||||
By Devon Govett
|
||||
###
|
||||
|
||||
PDFReference = require './reference'
|
||||
|
||||
class PDFObjectStore
|
||||
constructor: ->
|
||||
@objects = {}
|
||||
@length = 0
|
||||
|
||||
@root = @ref
|
||||
Type: 'Catalog'
|
||||
Pages: @ref
|
||||
Type: 'Pages'
|
||||
Count: 0
|
||||
Kids: []
|
||||
|
||||
@pages = @root.data['Pages']
|
||||
|
||||
ref: (data) ->
|
||||
@push ++@length, data
|
||||
|
||||
push: (id, data) ->
|
||||
ref = new PDFReference(id, data)
|
||||
@objects[id] = ref
|
||||
return ref
|
||||
|
||||
addPage: (page) ->
|
||||
@pages.data['Kids'].push(page.dictionary)
|
||||
@pages.data['Count']++
|
||||
|
||||
module.exports = PDFObjectStore
|
||||
@ -9,7 +9,7 @@
    "document",
    "vector"
  ],
  "version": "0.4.3",
  "version": "0.5.0",
  "homepage": "http://pdfkit.org/",
  "author": {
    "name": "Devon Govett",