SkTDArray<SkPDFObject*>* targetList = getSubstituteList(firstPage);
off_t offsetSum = fileOffset;
for (int i = 0; i < targetList->count(); ++i) {
- offsetSum += setFileOffset((*targetList)[i], offsetSum);
+ offsetSum += setFileOffset((*targetList)[i], (size_t) offsetSum);
}
return offsetSum - fileOffset;
}
// Add the resources, starting at firstIndex to the catalog, removing any dupes.
// A hash table would be really nice here.
-void addResourcesToCatalog(int firstIndex, bool firstPage,
+static void addResourcesToCatalog(int firstIndex, bool firstPage,
SkTDArray<SkPDFObject*>* resourceList,
SkPDFCatalog* catalog) {
for (int i = firstIndex; i < resourceList->count(); i++) {
// Figure out the size of things and inform the catalog of file offsets.
off_t fileOffset = headerSize();
- fileOffset += fCatalog->setFileOffset(fDocCatalog.get(), fileOffset);
- fileOffset += fCatalog->setFileOffset(fPages[0], fileOffset);
- fileOffset += fPages[0]->getPageSize(fCatalog.get(), fileOffset);
+ fileOffset += fCatalog->setFileOffset(fDocCatalog.get(),
+ (size_t) fileOffset);
+ fileOffset += fCatalog->setFileOffset(fPages[0], (size_t) fileOffset);
+ fileOffset += fPages[0]->getPageSize(fCatalog.get(),
+ (size_t) fileOffset);
for (int i = 0; i < fSecondPageFirstResourceIndex; i++) {
fileOffset += fCatalog->setFileOffset(fPageResources[i],
- fileOffset);
+ (size_t) fileOffset);
}
// Add the size of resources of substitute objects used on page 1.
fileOffset += fCatalog->setSubstituteResourcesOffsets(fileOffset, true);
}
for (int i = 0; i < fPageTree.count(); i++) {
- fileOffset += fCatalog->setFileOffset(fPageTree[i], fileOffset);
+ fileOffset += fCatalog->setFileOffset(fPageTree[i],
+ (size_t) fileOffset);
}
for (int i = 1; i < fPages.count(); i++) {
- fileOffset += fPages[i]->getPageSize(fCatalog.get(), fileOffset);
+ fileOffset += fPages[i]->getPageSize(fCatalog.get(),
+ (size_t) fileOffset);
}
for (int i = fSecondPageFirstResourceIndex;
i < fPageResources.count();
i++) {
fileOffset += fCatalog->setFileOffset(fPageResources[i],
- fileOffset);
+ (size_t) fileOffset);
}
fileOffset += fCatalog->setSubstituteResourcesOffsets(fileOffset,
// For the worst case (having 65536 continuous unicode and we use every other
// one of them), the possible savings by aggressive optimization is 416KB
// pre-compressed and does not provide enough motivation for implementation.
+
+// FIXME: this should be declared in a header so that it is separately
+// testable (see the caller in tests/ToUnicode.cpp).
+void append_cmap_sections(const SkTDArray<SkUnichar>& glyphToUnicode,
+ const SkPDFGlyphSet* subset,
+ SkDynamicMemoryWStream* cmap);
+
void append_cmap_sections(const SkTDArray<SkUnichar>& glyphToUnicode,
const SkPDFGlyphSet* subset,
SkDynamicMemoryWStream* cmap) {
return new SkPDFStream(cmapStream.get());
}
+#if defined (SK_SFNTLY_SUBSETTER)
static void sk_delete_array(const void* ptr, size_t, void*) {
    // Release an array handed to us as const void*: strip const and retype
    // in two explicit steps, then delete[] as the bytes it was allocated as.
    unsigned char* bytes = static_cast<unsigned char*>(const_cast<void*>(ptr));
    delete[] bytes;
}
+#endif
static int get_subset_font_stream(const char* fontName,
const SkTypeface* typeface,
// Records the file offset of this page's content stream in the catalog and
// returns the stream's output size. NOTE(review): this is a diff hunk — the
// change narrows the off_t offset to size_t for setFileOffset; confirm the
// offset cannot exceed SIZE_MAX on 32-bit builds before applying.
off_t SkPDFPage::getPageSize(SkPDFCatalog* catalog, off_t fileOffset) {
SkASSERT(fContentStream.get() != NULL);
- catalog->setFileOffset(fContentStream.get(), fileOffset);
+ catalog->setFileOffset(fContentStream.get(), (size_t) fileOffset);
return fContentStream->getOutputSize(catalog, true);
}