Compare commits
70 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 0581b34525 |  |
|  | 644bbe7c6d |  |
|  | 1b4aa114e3 |  |
|  | 552846ca2a |  |
|  | 1afe429034 |  |
|  | 8abd69fc24 |  |
|  | 4058311831 |  |
|  | cc820b9e5d |  |
|  | 8650f25331 |  |
|  | 760d9eda47 |  |
|  | 3c04842d97 |  |
|  | 02f53e7654 |  |
|  | f160c1d44d |  |
|  | 793ddaf42f |  |
|  | 8abe02528b |  |
|  | f43235934b |  |
|  | d39ca4502b |  |
|  | 1dc8f4e241 |  |
|  | 2aa982b51d |  |
|  | e9c0f55690 |  |
|  | 65b4ea623c |  |
|  | 458181cd1d |  |
|  | c3b0fa30dc |  |
|  | 4628be9251 |  |
|  | 4c78dadd55 |  |
|  | e76a7a7c26 |  |
|  | 0c20cb0be8 |  |
|  | ff7260a228 |  |
|  | 4dd1e7fb15 |  |
|  | 4c32c45534 |  |
|  | 36dcfe99d8 |  |
|  | 3ddc02b2f2 |  |
|  | 57034b88b0 |  |
|  | 0c5aff60b4 |  |
|  | 3e4e848a09 |  |
|  | a96702987f |  |
|  | 4cebd684ba |  |
|  | 8563a09544 |  |
|  | fdade4502e |  |
|  | 286b2d6a1c |  |
|  | 57a26fabcb |  |
|  | 715e8a9769 |  |
|  | 4eba7b6486 |  |
|  | 56efcaa925 |  |
|  | acb8f005c2 |  |
|  | 216ec3c90b |  |
|  | 3d1c16ca3f |  |
|  | 335a86bbb2 |  |
|  | 1f54108f11 |  |
|  | 2aaa913c40 |  |
|  | cbda72ed54 |  |
|  | 14e5569a67 |  |
|  | c647f573d8 |  |
|  | 9a3865c716 |  |
|  | ec21904595 |  |
|  | f53be3c73e |  |
|  | 38370cf3f4 |  |
|  | 3a8f988740 |  |
|  | 021be79bf7 |  |
|  | 2c59e7b0f7 |  |
|  | f7c14bf1d4 |  |
|  | a2bb555292 |  |
|  | cf954a7f6d |  |
|  | 0c9504e1bc |  |
|  | b5cf8a235d |  |
|  | 955e4b62d0 |  |
|  | 0dee385578 |  |
|  | ba5be43005 |  |
|  | 5c3a686ebe |  |
|  | 644e17edec |  |

@@ -1,3 +1,2 @@
[run]
branch = True
omit = camelot/ext/*

@@ -10,20 +10,25 @@ assignees: ''
<!-- Please read the filing issues section of the contributor's guide first: https://camelot-py.readthedocs.io/en/master/dev/contributing.html -->

**Describe the bug**
A clear and concise description of what the bug is.

<!-- A clear and concise description of what the bug is. -->

**Steps to reproduce the bug**
Steps used to install `camelot`:
1. Add step here (you can add more steps too)

Steps to reproduce the behavior:
1. Add step here (you can add more steps too)
<!-- Steps used to install `camelot`:
1. Add step here (you can add more steps too) -->

<!-- Steps to be used to reproduce behavior:
1. Add step here (you can add more steps too) -->

**Expected behavior**
A clear and concise description of what you expected to happen.

<!-- A clear and concise description of what you expected to happen. -->

**Code**
Add the Camelot code snippet that you used.

<!-- Add the Camelot code snippet that you used. -->

```
import camelot

@@ -31,18 +36,22 @@ import camelot
```

**PDF**
Add the PDF file that you want to extract tables from.

<!-- Add the PDF file that you want to extract tables from. -->

**Screenshots**
If applicable, add screenshots to help explain your problem.

<!-- If applicable, add screenshots to help explain your problem. -->

**Environment**
- OS: [e.g. MacOS]
- Python version:
- Numpy version:
- OpenCV version:
- Ghostscript version:
- Camelot version:

- OS: [e.g. macOS]
- Python version:
- Numpy version:
- OpenCV version:
- Ghostscript version:
- Camelot version:

**Additional context**
Add any other context about the problem here.

<!-- Add any other context about the problem here. -->

@@ -0,0 +1,44 @@
name: tests

on: [pull_request]

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.6, 3.7, 3.8]

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install camelot with dependencies
      run: |
        make install
    - name: Test with pytest
      run: |
        make test

  test_latest:
    name: Test on ${{ matrix.os }} with Python 3.9
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [3.9]

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install camelot with dependencies
      run: |
        make install
    - name: Test with pytest
      run: |
        make test

@@ -1,3 +1,4 @@
fontconfig/
__pycache__/
*.py[cod]
*.so

29 .travis.yml
@@ -1,29 +0,0 @@
sudo: true
language: python
cache: pip
addons:
  apt:
    update: true
install:
  - make install
jobs:
  include:
    - stage: test
      script:
        - make test
      python: '3.6'
    - stage: test
      script:
        - make test
      python: '3.7'
      dist: xenial
    - stage: test
      script:
        - make test
      python: '3.8'
      dist: xenial
    - stage: coverage
      python: '3.8'
      script:
        - make test
        - codecov --verbose

42 HISTORY.md
@@ -4,6 +4,48 @@ Release History
master
------

0.10.1 (2021-07-11)
------------------

- Change extra requirements from `cv` to `base`. You can use `pip install "camelot-py[base]"` to install everything required to run camelot.

0.10.0 (2021-07-11)
------------------

**Improvements**

- Add support for multiple image conversion backends. [#198](https://github.com/camelot-dev/camelot/pull/198) and [#253](https://github.com/camelot-dev/camelot/pull/253) by Vinayak Mehta.
- Add markdown export format. [#222](https://github.com/camelot-dev/camelot/pull/222/) by [Lucas Cimon](https://github.com/Lucas-C).

**Documentation**

- Add faq section. [#216](https://github.com/camelot-dev/camelot/pull/216) by [Stefano Fiorucci](https://github.com/anakin87).

0.9.0 (2021-06-15)
------------------

**Bugfixes**

- Fix use of resolution argument to generate image with ghostscript. [#231](https://github.com/camelot-dev/camelot/pull/231) by [Tiago Samaha Cordeiro](https://github.com/tiagosamaha).
- [#15](https://github.com/camelot-dev/camelot/issues/15) Fix duplicate strings being assigned to the same cell. [#206](https://github.com/camelot-dev/camelot/pull/206) by [Eduardo Gonzalez Lopez de Murillas](https://github.com/edugonza).
- Save plot when filename is specified. [#121](https://github.com/camelot-dev/camelot/pull/121) by [Jens Diemer](https://github.com/jedie).
- Close file streams explicitly. [#202](https://github.com/camelot-dev/camelot/pull/202) by [Martin Abente Lahaye](https://github.com/tchx84).
- Use correct re.sub signature. [#186](https://github.com/camelot-dev/camelot/pull/186) by [pevisscher](https://github.com/pevisscher).
- [#183](https://github.com/camelot-dev/camelot/issues/183) Fix UnicodeEncodeError when using Stream flavor by adding encoding kwarg to `to_html`. [#188](https://github.com/camelot-dev/camelot/pull/188) by [Stefano Fiorucci](https://github.com/anakin87).
- [#179](https://github.com/camelot-dev/camelot/issues/179) Fix `max() arg is an empty sequence` error on PDFs with blank pages. [#189](https://github.com/camelot-dev/camelot/pull/189) by Vinayak Mehta.

**Improvements**

- Add `line_overlap` and `boxes_flow` to `LAParams`. [#219](https://github.com/camelot-dev/camelot/pull/219) by [Arnie97](https://github.com/Arnie97).
- [Add bug report template.](https://github.com/camelot-dev/camelot/commit/0a3944e54d133b701edfe9c7546ff11289301ba8)
- Move from [Travis to GitHub Actions](https://github.com/camelot-dev/camelot/pull/241).
- Update `.readthedocs.yml` and [remove requirements.txt](https://github.com/camelot-dev/camelot/commit/7ab5db39d07baa4063f975e9e00f6073340e04c1#diff-cde814ef2f549dc093f5b8fc533b7e8f47e7b32a8081e0760e57d5c25a1139d9)

**Documentation**

- [#193](https://github.com/camelot-dev/camelot/issues/193) Add better checks to confirm proper installation of ghostscript. [#196](https://github.com/camelot-dev/camelot/pull/196) by [jimhall](https://github.com/jimhall).
- Update `advanced.rst` plotting examples. [#119](https://github.com/camelot-dev/camelot/pull/119) by [Jens Diemer](https://github.com/jedie).

0.8.2 (2020-07-27)
------------------

2 LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019-2020 Camelot Developers
Copyright (c) 2019-2021 Camelot Developers
Copyright (c) 2018-2019 Peeply Private Ltd (Singapore)

Permission is hereby granted, free of charge, to any person obtaining a copy

21 README.md
@@ -4,11 +4,10 @@

# Camelot: PDF Table Extraction for Humans

[](https://travis-ci.org/camelot-dev/camelot) [](https://camelot-py.readthedocs.io/en/master/)
[](https://github.com/camelot-dev/camelot/actions/workflows/tests.yml) [](https://camelot-py.readthedocs.io/en/master/)
[](https://codecov.io/github/camelot-dev/camelot?branch=master)
[](https://pypi.org/project/camelot-py/) [](https://pypi.org/project/camelot-py/) [](https://pypi.org/project/camelot-py/) [](https://gitter.im/camelot-dev/Lobby)
[](https://github.com/ambv/black) [](https://deepsource.io/gh/camelot-dev/camelot/?ref=repository-badge)

[](https://github.com/ambv/black)

**Camelot** is a Python library that can help you extract tables from PDFs!

@@ -23,7 +22,7 @@
>>> tables = camelot.read_pdf('foo.pdf')
>>> tables
<TableList n=1>
>>> tables.export('foo.csv', f='csv', compress=True) # json, excel, html, sqlite
>>> tables.export('foo.csv', f='csv', compress=True) # json, excel, html, markdown, sqlite
>>> tables[0]
<Table shape=(7, 7)>
>>> tables[0].parsing_report

@@ -33,7 +32,7 @@
    'order': 1,
    'page': 1
}
>>> tables[0].to_csv('foo.csv') # to_json, to_excel, to_html, to_sqlite
>>> tables[0].to_csv('foo.csv') # to_json, to_excel, to_html, to_markdown, to_sqlite
>>> tables[0].df # get a pandas DataFrame!
</pre>

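The quickstart hunks above add `markdown` to the export formats. A minimal sketch of the new option, assuming `foo.pdf` is a placeholder for any text-based PDF with a detectable table:

```python
import camelot

tables = camelot.read_pdf("foo.pdf")  # placeholder file name

# 'markdown' now sits alongside csv, json, excel, html and sqlite;
# export() writes one Markdown file per extracted table.
tables.export("foo.md", f="markdown")
```

The Markdown writer goes through `pandas.DataFrame.to_markdown`, which needs the `tabulate` package to be installed.
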
@@ -50,11 +49,13 @@ Camelot also comes packaged with a [command-line interface](https://camelot-py.r

**Note:** Camelot only works with text-based PDFs and not scanned documents. (As Tabula [explains](https://github.com/tabulapdf/tabula#why-tabula), "If you can click and drag to select text in your table in a PDF viewer, then your PDF is text-based".)

You can check out some frequently asked questions [here](https://camelot-py.readthedocs.io/en/master/user/faq.html).

## Why Camelot?

- **Configurability**: Camelot gives you control over the table extraction process with its [tweakable settings](https://camelot-py.readthedocs.io/en/master/user/advanced.html).
- **Metrics**: Bad tables can be discarded based on metrics like accuracy and whitespace, without having to manually look at each table.
- **Output**: Each table is extracted into a **pandas DataFrame**, which seamlessly integrates into [ETL and data analysis workflows](https://gist.github.com/vinayak-mehta/e5949f7c2410a0e12f25d3682dc9e873). You can also export tables to multiple formats, which include CSV, JSON, Excel, HTML and Sqlite.
- **Configurability**: Camelot gives you control over the table extraction process with [tweakable settings](https://camelot-py.readthedocs.io/en/master/user/advanced.html).
- **Metrics**: You can discard bad tables based on metrics like accuracy and whitespace, without having to manually look at each table.
- **Output**: Each table is extracted into a **pandas DataFrame**, which seamlessly integrates into [ETL and data analysis workflows](https://gist.github.com/vinayak-mehta/e5949f7c2410a0e12f25d3682dc9e873). You can also export tables to multiple formats, which include CSV, JSON, Excel, HTML, Markdown, and Sqlite.

See [comparison with similar libraries and tools](https://github.com/camelot-dev/camelot/wiki/Comparison-with-other-PDF-Table-Extraction-libraries-and-tools).

@@ -77,7 +78,7 @@ $ conda install -c conda-forge camelot-py
After [installing the dependencies](https://camelot-py.readthedocs.io/en/master/user/install-deps.html) ([tk](https://packages.ubuntu.com/bionic/python/python-tk) and [ghostscript](https://www.ghostscript.com/)), you can also just use pip to install Camelot:

<pre>
$ pip install "camelot-py[cv]"
$ pip install "camelot-py[base]"
</pre>

### From the source code

@@ -92,7 +93,7 @@ and install Camelot using pip:

<pre>
$ cd camelot
$ pip install ".[cv]"
$ pip install ".[base]"
</pre>

## Documentation

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

VERSION = (0, 8, 2)
VERSION = (0, 10, 1)
PRERELEASE = None # alpha, beta or rc
REVISION = None

@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-

from .image_conversion import ImageConversionBackend

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-

import sys
import ctypes
from ctypes.util import find_library


def installed_posix():
    library = find_library("gs")
    return library is not None


def installed_windows():
    library = find_library(
        "".join(("gsdll", str(ctypes.sizeof(ctypes.c_voidp) * 8), ".dll"))
    )
    return library is not None


class GhostscriptBackend(object):
    def installed(self):
        if sys.platform in ["linux", "darwin"]:
            return installed_posix()
        elif sys.platform == "win32":
            return installed_windows()
        else:
            return installed_posix()

    def convert(self, pdf_path, png_path, resolution=300):
        if not self.installed():
            raise OSError(
                "Ghostscript is not installed. You can install it using the instructions"
                " here: https://camelot-py.readthedocs.io/en/master/user/install-deps.html"
            )

        import ghostscript

        gs_command = [
            "gs",
            "-q",
            "-sDEVICE=png16m",
            "-o",
            png_path,
            f"-r{resolution}",
            pdf_path,
        ]
        ghostscript.Ghostscript(*gs_command)

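A short usage sketch of the Ghostscript backend above, assuming it is importable as `camelot.backends.ghostscript_backend` (the file paths are placeholders, and both Ghostscript itself and the `ghostscript` Python bindings must be installed):

```python
from camelot.backends.ghostscript_backend import GhostscriptBackend

gs = GhostscriptBackend()
if gs.installed():
    # Rasterise a single-page placeholder PDF to a 300 dpi PNG via the gs wrapper.
    gs.convert("page-1.pdf", "page-1.png", resolution=300)
else:
    print("Ghostscript shared library not found; see the install-deps docs.")
```
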
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-

from .poppler_backend import PopplerBackend
from .ghostscript_backend import GhostscriptBackend

BACKENDS = {"poppler": PopplerBackend, "ghostscript": GhostscriptBackend}


class ImageConversionBackend(object):
    def __init__(self, backend="poppler", use_fallback=True):
        if backend not in BACKENDS.keys():
            raise ValueError(f"Image conversion backend '{backend}' not supported")

        self.backend = backend
        self.use_fallback = use_fallback
        self.fallbacks = list(filter(lambda x: x != backend, BACKENDS.keys()))

    def convert(self, pdf_path, png_path):
        try:
            converter = BACKENDS[self.backend]()
            converter.convert(pdf_path, png_path)
        except Exception as e:
            import sys

            if self.use_fallback:
                for fallback in self.fallbacks:
                    try:
                        converter = BACKENDS[fallback]()
                        converter.convert(pdf_path, png_path)
                    except Exception as e:
                        raise type(e)(
                            str(e) + f" with image conversion backend '{fallback}'"
                        ).with_traceback(sys.exc_info()[2])
                        continue
                    else:
                        break
            else:
                raise type(e)(
                    str(e) + f" with image conversion backend '{self.backend}'"
                ).with_traceback(sys.exc_info()[2])

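A minimal sketch of driving the wrapper above, assuming it is importable as `camelot.backends.image_conversion` (file names are placeholders; `pdftopng` or Ghostscript must be available for the chosen backend):

```python
from camelot.backends.image_conversion import ImageConversionBackend

# Prefer poppler's pdftopng; with use_fallback=True a failure falls through to
# the other registered backend (ghostscript) before the error is re-raised.
converter = ImageConversionBackend(backend="poppler", use_fallback=True)
converter.convert("page-1.pdf", "page-1.png")
```
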
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-

import shutil
import subprocess


class PopplerBackend(object):
    def convert(self, pdf_path, png_path):
        pdftopng_executable = shutil.which("pdftopng")
        if pdftopng_executable is None:
            raise OSError(
                "pdftopng is not installed. You can install it using the 'pip install pdftopng' command."
            )

        pdftopng_command = [pdftopng_executable, pdf_path, png_path]

        try:
            subprocess.check_output(
                " ".join(pdftopng_command), stderr=subprocess.STDOUT, shell=True
            )
        except subprocess.CalledProcessError as e:
            raise ValueError(e.output)

@@ -43,7 +43,7 @@ pass_config = click.make_pass_decorator(Config)
@click.option(
    "-f",
    "--format",
    type=click.Choice(["csv", "json", "excel", "html", "sqlite"]),
    type=click.Choice(["csv", "excel", "html", "json", "markdown", "sqlite"]),
    help="Output file format.",
)
@click.option("-z", "--zip", is_flag=True, help="Create ZIP archive.")

@@ -55,7 +55,9 @@ class TextEdge(object):
        x = round(self.x, 2)
        y0 = round(self.y0, 2)
        y1 = round(self.y1, 2)
        return f"<TextEdge x={x} y0={y0} y1={y1} align={self.align} valid={self.is_valid}>"
        return (
            f"<TextEdge x={x} y0={y0} y1={y1} align={self.align} valid={self.is_valid}>"
        )

    def update_coords(self, x, y0, edge_tol=50):
        """Updates the text edge's x and bottom y coordinates and sets

@@ -102,8 +104,7 @@ class TextEdges(object):
        return None

    def add(self, textline, align):
        """Adds a new text edge to the current dict.
        """
        """Adds a new text edge to the current dict."""
        x = self.get_x_coord(textline, align)
        y0 = textline.y0
        y1 = textline.y1

@@ -111,8 +112,7 @@ class TextEdges(object):
        self._textedges[align].append(te)

    def update(self, textline):
        """Updates an existing text edge in the current dict.
        """
        """Updates an existing text edge in the current dict."""
        for align in ["left", "right", "middle"]:
            x_coord = self.get_x_coord(textline, align)
            idx = self.find(x_coord, align)

@@ -288,10 +288,10 @@ class Cell(object):
        self._text = ""

    def __repr__(self):
        x1 = round(self.x1, 2)
        y1 = round(self.y1, 2)
        x2 = round(self.x2, 2)
        y2 = round(self.y2, 2)
        x1 = round(self.x1)
        y1 = round(self.y1)
        x2 = round(self.x2)
        y2 = round(self.y2)
        return f"<Cell x1={x1} y1={y1} x2={x2} y2={y2}>"

    @property

@@ -304,8 +304,7 @@ class Cell(object):

    @property
    def bound(self):
        """The number of sides on which the cell is bounded.
        """
        """The number of sides on which the cell is bounded."""
        return self.top + self.bottom + self.left + self.right

@@ -361,8 +360,7 @@ class Table(object):

    @property
    def data(self):
        """Returns two-dimensional list of strings in table.
        """
        """Returns two-dimensional list of strings in table."""
        d = []
        for row in self.cells:
            d.append([cell.text.strip() for cell in row])

@@ -383,8 +381,7 @@ class Table(object):
        return report

    def set_all_edges(self):
        """Sets all table edges to True.
        """
        """Sets all table edges to True."""
        for row in self.cells:
            for cell in row:
                cell.left = cell.right = cell.top = cell.bottom = True

@@ -526,8 +523,7 @@ class Table(object):
        return self

    def set_border(self):
        """Sets table border edges to True.
        """
        """Sets table border edges to True."""
        for r in range(len(self.rows)):
            self.cells[r][0].left = True
            self.cells[r][len(self.cols) - 1].right = True

@@ -634,6 +630,21 @@ class Table(object):
        with open(path, "w", encoding="utf-8") as f:
            f.write(html_string)

    def to_markdown(self, path, **kwargs):
        """Writes Table to a Markdown file.

        For kwargs, check :meth:`pandas.DataFrame.to_markdown`.

        Parameters
        ----------
        path : str
            Output filepath.

        """
        md_string = self.df.to_markdown(**kwargs)
        with open(path, "w", encoding="utf-8") as f:
            f.write(md_string)

    def to_sqlite(self, path, **kwargs):
        """Writes Table to sqlite database.

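Since the new `Table.to_markdown` forwards `**kwargs` to `pandas.DataFrame.to_markdown`, pandas and tabulate options pass straight through. A small sketch under that assumption (file names are placeholders; `tabulate` must be installed for pandas to render Markdown):

```python
import camelot

tables = camelot.read_pdf("foo.pdf")  # placeholder PDF

# index=False is handed to pandas.DataFrame.to_markdown to drop the row index.
tables[0].to_markdown("foo-table-1.md", index=False)
```
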
@@ -715,7 +726,7 @@ class TableList(object):
        path : str
            Output filepath.
        f : str
            File format. Can be csv, json, excel, html and sqlite.
            File format. Can be csv, excel, html, json, markdown or sqlite.
        compress : bool
            Whether or not to add files to a ZIP archive.

@@ -728,7 +739,7 @@ class TableList(object):

        kwargs = {"path": path, "dirname": dirname, "root": root, "ext": ext}

        if f in ["csv", "json", "html"]:
        if f in ["csv", "html", "json", "markdown"]:
            self._write_file(f=f, **kwargs)
            if compress:
                self._compress_dir(**kwargs)

@@ -1,674 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[The rest of this deleted file is the standard text of the GNU General Public License, version 3.]
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
|
|
@@ -1,99 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ghostscript - A Python interface for the Ghostscript interpreter C-API
"""
#
# Modifications 2018 by Vinayak Mehta <vmehta94@gmail.com>
# Copyright 2010-2018 by Hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

from . import _gsprint as gs


__author__ = "Hartmut Goebel <h.goebel@crazy-compilers.com>"
__copyright__ = "Copyright 2010-2018 by Hartmut Goebel <h.goebel@crazy-compilers.com>"
__license__ = "GNU General Public License version 3 (GPL v3)"
__version__ = "0.6"


class __Ghostscript(object):
    def __init__(self, instance, args, stdin=None, stdout=None, stderr=None):
        self._initialized = False
        self._callbacks = None
        if stdin or stdout or stderr:
            self.set_stdio(stdin, stdout, stderr)
        rc = gs.init_with_args(instance, args)
        self._initialized = True
        if rc == gs.e_Quit:
            self.exit()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.exit()

    def set_stdio(self, stdin=None, stdout=None, stderr=None):
        """Set stdin, stdout and stderr of the ghostscript interpreter.

        The ``stdin`` stream has to support the ``readline()``
        interface. The ``stdout`` and ``stderr`` streams have to
        support the ``write()`` and ``flush()`` interface.

        Please note that this does not affect the input- and output-
        streams of the devices. Esp. setting stdout does not allow
        catching the devise-output even when using ``-sOutputFile=-``.

        """
        global __instance__
        self._callbacks = (
            stdin and gs._wrap_stdin(stdin) or None,
            stdout and gs._wrap_stdout(stdout) or None,
            stderr and gs._wrap_stderr(stderr) or None,
        )
        gs.set_stdio(__instance__, *self._callbacks)

    def __del__(self):
        self.exit()

    def exit(self):
        global __instance__
        if self._initialized:
            if __instance__ is not None:
                gs.exit(__instance__)
                gs.delete_instance(__instance__)
                __instance__ = None
            self._initialized = False


def Ghostscript(*args, **kwargs):
    """Factory function for setting up a Ghostscript instance
    """
    global __instance__
    # Ghostscript only supports a single instance
    if __instance__ is None:
        __instance__ = gs.new_instance()
    return __Ghostscript(
        __instance__,
        args,
        stdin=kwargs.get("stdin", None),
        stdout=kwargs.get("stdout", None),
        stderr=kwargs.get("stderr", None),
    )


__instance__ = None
@@ -1,270 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ghostscript._gsprint - A low-level interface to the Ghostscript C-API using ctypes
"""
#
# Modifications 2018 by Vinayak Mehta <vmehta94@gmail.com>
# Copyright 2010-2018 by Hartmut Goebel <h.goebel@crazy-compilers.com>
#
# Display_callback Structure by Lasse Fister <commander@graphicore.de> in 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

import sys
from ctypes import *


# base/gserrors.h
#
# Internal code for a normal exit when usage info is displayed.
# This allows Window versions of Ghostscript to pause until
# the message can be read.
#
e_Info = -110

#
# Internal code for the .quit operator.
# The real quit code is an integer on the operand stack.
# gs_interpret returns this only for a .quit with a zero exit code.
#
e_Quit = -101

__author__ = "Hartmut Goebel <h.goebel@crazy-compilers.com>"
__copyright__ = "Copyright 2010-2018 by Hartmut Goebel <h.goebel@crazy-compilers.com>"
__license__ = "GNU General Public License version 3 (GPL v3)"
__version__ = "0.6"

gs_main_instance = c_void_p
display_callback = c_void_p

# https://www.ghostscript.com/doc/current/API.htm


class GhostscriptError(Exception):
    def __init__(self, ecode):
        self.code = ecode


def new_instance():
    """Create a new instance of Ghostscript

    This instance is passed to most other API functions.
    """
    # :todo: The caller_handle will be provided to callback functions.
    display_callback = None
    instance = gs_main_instance()
    rc = libgs.gsapi_new_instance(pointer(instance), display_callback)
    if rc != 0:
        raise GhostscriptError(rc)
    return instance


def delete_instance(instance):
    """Destroy an instance of Ghostscript

    Before you call this, Ghostscript must have finished.
    If Ghostscript has been initialised, you must call exit()
    before delete_instance()
    """
    return libgs.gsapi_delete_instance(instance)


if sys.platform == "win32":
    c_stdstream_call_t = WINFUNCTYPE(c_int, gs_main_instance, POINTER(c_char), c_int)
else:
    c_stdstream_call_t = CFUNCTYPE(c_int, gs_main_instance, POINTER(c_char), c_int)


def _wrap_stdin(infp):
    """Wrap a filehandle into a C function to be used as `stdin` callback
    for ``set_stdio``. The filehandle has to support the readline() method.
    """

    def _wrap(instance, dest, count):
        try:
            data = infp.readline(count)
        except:
            count = -1
        else:
            if not data:
                count = 0
            else:
                count = len(data)
                memmove(dest, c_char_p(data), count)
        return count

    return c_stdstream_call_t(_wrap)


def _wrap_stdout(outfp):
    """Wrap a filehandle into a C function to be used as `stdout` or
    `stderr` callback for ``set_stdio``. The filehandle has to support the
    write() and flush() methods.
    """

    def _wrap(instance, str, count):
        outfp.write(str[:count])
        outfp.flush()
        return count

    return c_stdstream_call_t(_wrap)


_wrap_stderr = _wrap_stdout


def set_stdio(instance, stdin, stdout, stderr):
    """Set the callback functions for stdio.

    ``stdin``, ``stdout`` and ``stderr`` have to be ``ctypes``
    callback functions matching the ``_gsprint.c_stdstream_call_t``
    prototype. You may want to use _wrap_* to wrap file handles.

    Note 1: This function only changes stdio of the Postscript
    interpreter, not that of the devices.

    Note 2: Make sure you keep references to C function objects
    as long as they are used from C code. Otherwise they may be
    garbage collected, crashing your program when a callback is made.

    The ``stdin`` callback function should return the number of
    characters read, `0` for EOF, or `-1` for error. The `stdout` and
    `stderr` callback functions should return the number of characters
    written.

    You may pass ``None`` for any of stdin, stdout or stderr , in which
    case the system stdin, stdout resp. stderr will be used.
    """
    rc = libgs.gsapi_set_stdio(instance, stdin, stdout, stderr)
    if rc not in (0, e_Quit, e_Info):
        raise GhostscriptError(rc)
    return rc


def init_with_args(instance, argv):
    """Initialise the interpreter

    1. If quit or EOF occur during init_with_args(), the return value
       will be e_Quit. This is not an error. You must call exit() and
       must not call any other functions.

    2. If usage info should be displayed, the return value will be
       e_Info which is not an error. Do not call exit().

    3. Under normal conditions this returns 0. You would then call one
       or more run_*() functions and then finish with exit()
    """
    ArgArray = c_char_p * len(argv)
    c_argv = ArgArray(*argv)
    rc = libgs.gsapi_init_with_args(instance, len(argv), c_argv)
    if rc not in (0, e_Quit, e_Info):
        raise GhostscriptError(rc)
    return rc


def exit(instance):
    """Exit the interpreter

    This must be called on shutdown if init_with_args() has been
    called, and just before delete_instance()
    """
    rc = libgs.gsapi_exit(instance)
    if rc != 0:
        raise GhostscriptError(rc)
    return rc


def __win32_finddll():
    try:
        import winreg
    except ImportError:
        # assume Python 2
        from _winreg import (
            OpenKey,
            CloseKey,
            EnumKey,
            QueryValueEx,
            QueryInfoKey,
            HKEY_LOCAL_MACHINE,
        )
    else:
        from winreg import (
            OpenKey,
            CloseKey,
            EnumKey,
            QueryValueEx,
            QueryInfoKey,
            HKEY_LOCAL_MACHINE,
        )

    from distutils.version import LooseVersion
    import os

    dlls = []
    # Look up different variants of Ghostscript and take the highest
    # version for which the DLL is to be found in the filesystem.
    for key_name in (
        "AFPL Ghostscript",
        "Aladdin Ghostscript",
        "GNU Ghostscript",
        "GPL Ghostscript",
    ):
        try:
            k1 = OpenKey(HKEY_LOCAL_MACHINE, "Software\\%s" % key_name)
            for num in range(0, QueryInfoKey(k1)[0]):
                version = EnumKey(k1, num)
                try:
                    k2 = OpenKey(k1, version)
                    dll_path = QueryValueEx(k2, "GS_DLL")[0]
                    CloseKey(k2)
                    if os.path.exists(dll_path):
                        dlls.append((LooseVersion(version), dll_path))
                except WindowsError:
                    pass
            CloseKey(k1)
        except WindowsError:
            pass
    if dlls:
        dlls.sort()
        return dlls[-1][-1]
    else:
        return None


if sys.platform == "win32":
    libgs = __win32_finddll()
    if not libgs:
        import ctypes.util

        libgs = ctypes.util.find_library(
            "".join(("gsdll", str(ctypes.sizeof(ctypes.c_voidp) * 8), ".dll"))
        )  # finds in %PATH%
        if not libgs:
            raise RuntimeError("Please make sure that Ghostscript is installed")
        libgs = windll.LoadLibrary(libgs)
else:
    try:
        libgs = cdll.LoadLibrary("libgs.so")
    except OSError:
        # shared object file not found
        import ctypes.util

        libgs = ctypes.util.find_library("gs")
        if not libgs:
            raise RuntimeError("Please make sure that Ghostscript is installed")
        libgs = cdll.LoadLibrary(libgs)

del __win32_finddll
@@ -6,7 +6,7 @@ import sys
from PyPDF2 import PdfFileReader, PdfFileWriter

from .core import TableList
from .parsers import Lattice, Stream, LatticeOCR, StreamOCR
from .parsers import Stream, Lattice
from .utils import (
    TemporaryDirectory,
    get_page_layout,

@@ -38,8 +38,8 @@ class PDFHandler(object):
        if is_url(filepath):
            filepath = download_url(filepath)
        self.filepath = filepath
        if not filepath.lower().endswith(".pdf"):
            raise NotImplementedError("File format not supported")
        # if not filepath.lower().endswith(".pdf"):
        #     raise NotImplementedError("File format not supported")

        if password is None:
            self.password = ""

@@ -47,9 +47,9 @@ class PDFHandler(object):
            self.password = password
            if sys.version_info[0] < 3:
                self.password = self.password.encode("ascii")
        self.pages = self._get_pages(self.filepath, pages)
        self.pages = self._get_pages(pages)

    def _get_pages(self, filepath, pages):
    def _get_pages(self, pages):
        """Converts pages string to list of ints.

        Parameters

@@ -67,13 +67,16 @@ class PDFHandler(object):

        """
        page_numbers = []

        if pages == "1":
            page_numbers.append({"start": 1, "end": 1})
        else:
            instream = open(filepath, "rb")
            infile = PdfFileReader(instream, strict=False)
            with open(self.filepath, "rb") as f:
                infile = PdfFileReader(f, strict=False)

                if infile.isEncrypted:
                    infile.decrypt(self.password)

                if pages == "all":
                    page_numbers.append({"start": 1, "end": infile.getNumPages()})
                else:

@@ -85,7 +88,7 @@ class PDFHandler(object):
                        page_numbers.append({"start": int(a), "end": int(b)})
                    else:
                        page_numbers.append({"start": int(r), "end": int(r)})
            instream.close()

        P = []
        for p in page_numbers:
            P.extend(range(p["start"], p["end"] + 1))

@@ -163,19 +166,12 @@ class PDFHandler(object):
            List of tables found in PDF.

        """
        parsers = {
            "lattice": Lattice,
            "stream": Stream,
            "lattice_ocr": LatticeOCR,
            "stream_ocr": StreamOCR,
        }

        tables = []
        with TemporaryDirectory() as tempdir:
            for p in self.pages:
                self._save_page(self.filepath, p, tempdir)
            pages = [os.path.join(tempdir, f"page-{p}.pdf") for p in self.pages]
            parser = parsers[flavor](**kwargs)
            parser = Lattice(**kwargs) if flavor == "lattice" else Stream(**kwargs)
            for p in pages:
                t = parser.extract_tables(
                    p, suppress_stdout=suppress_stdout, layout_kwargs=layout_kwargs
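The hunk above drops the `filepath` argument from `PDFHandler._get_pages()`, which now reads `self.filepath` directly. A minimal sketch of the updated call, assuming the private helper keeps this signature (the file name is a placeholder, and `_get_pages` is an internal method, not public API):

    import camelot

    handler = camelot.handlers.PDFHandler("foo.pdf")  # "foo.pdf" is a placeholder path
    page_numbers = handler._get_pages("1,3-4")        # no filepath argument after this change
    print(page_numbers)                               # a list of ints, e.g. [1, 3, 4]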
@@ -98,10 +98,9 @@ def read_pdf(
    tables : camelot.core.TableList

    """
    if flavor not in ["lattice", "stream", "lattice_ocr", "stream_ocr"]:
    if flavor not in ["lattice", "stream"]:
        raise NotImplementedError(
            "Unknown flavor specified. Use one of the following: "
            "'lattice', 'stream', 'lattice_ocr', 'stream_ocr'"
            "Unknown flavor specified." " Use either 'lattice' or 'stream'"
        )

    with warnings.catch_warnings():
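With the OCR flavors removed, `read_pdf` only accepts the two remaining values; a short sketch of the accepted calls (the file name is a placeholder):

    import camelot

    tables = camelot.read_pdf("foo.pdf", flavor="lattice")  # default flavor
    tables = camelot.read_pdf("foo.pdf", flavor="stream")   # the only other accepted value
    # Any other flavor string now raises NotImplementedError, per the hunk above.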
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-

from .lattice import Lattice
from .stream import Stream
from .lattice_ocr import LatticeOCR
from .stream_ocr import StreamOCR
from .lattice import Lattice
@@ -6,8 +6,7 @@ from ..utils import get_page_layout, get_text_objects


class BaseParser(object):
    """Defines a base parser.
    """
    """Defines a base parser."""

    def _generate_layout(self, filename, layout_kwargs):
        self.filename = filename

@@ -18,3 +17,4 @@ class BaseParser(object):
        self.vertical_text = get_text_objects(self.layout, ltype="vertical_text")
        self.pdf_width, self.pdf_height = self.dimensions
        self.rootname, __ = os.path.splitext(self.filename)
        self.imagename = "".join([self.rootname, ".png"])
@@ -6,7 +6,6 @@ import copy
import locale
import logging
import warnings
import subprocess

import numpy as np
import pandas as pd

@@ -29,6 +28,7 @@ from ..image_processing import (
    find_contours,
    find_joints,
)
from ..backends.image_conversion import BACKENDS


logger = logging.getLogger("camelot")

@@ -111,7 +111,8 @@ class Lattice(BaseParser):
        threshold_constant=-2,
        iterations=0,
        resolution=300,
        **kwargs
        backend="ghostscript",
        **kwargs,
    ):
        self.table_regions = table_regions
        self.table_areas = table_areas

@@ -128,6 +129,37 @@ class Lattice(BaseParser):
        self.threshold_constant = threshold_constant
        self.iterations = iterations
        self.resolution = resolution
        self.backend = Lattice._get_backend(backend)

    @staticmethod
    def _get_backend(backend):
        def implements_convert():
            methods = [
                method for method in dir(backend) if method.startswith("__") is False
            ]
            return "convert" in methods

        if isinstance(backend, str):
            if backend not in BACKENDS.keys():
                raise NotImplementedError(
                    f"Unknown backend '{backend}' specified. Please use either 'poppler' or 'ghostscript'."
                )

            if backend == "ghostscript":
                warnings.warn(
                    "'ghostscript' will be replaced by 'poppler' as the default image conversion"
                    " backend in v0.12.0. You can try out 'poppler' with backend='poppler'.",
                    DeprecationWarning,
                )

            return BACKENDS[backend]()
        else:
            if not implements_convert():
                raise NotImplementedError(
                    f"'{backend}' must implement a 'convert' method"
                )

            return backend

    @staticmethod
    def _reduce_index(t, idx, shift_text):

@@ -207,19 +239,6 @@ class Lattice(BaseParser):
                        t.cells[i][j].text = t.cells[i - 1][j].text
        return t

    def _generate_image(self):
        from ..ext.ghostscript import Ghostscript

        self.imagename = "".join([self.rootname, ".png"])
        gs_call = "-q -sDEVICE=png16m -o {} -r300 {}".format(
            self.imagename, self.filename
        )
        gs_call = gs_call.encode().split()
        null = open(os.devnull, "wb")
        with Ghostscript(*gs_call, stdout=null) as gs:
            pass
        null.close()

    def _generate_table_bbox(self):
        def scale_areas(areas):
            scaled_areas = []

@@ -399,7 +418,8 @@ class Lattice(BaseParser):
            )
            return []

        self._generate_image()
        self.backend.convert(self.filename, self.imagename)

        self._generate_table_bbox()

        _tables = []
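As the new `_get_backend` shows, `backend` may be either a name registered in `BACKENDS` ("ghostscript" or "poppler") or any object exposing a `convert` method. A hedged sketch of both call styles (the file name and the custom class name are illustrative, not part of the diff):

    import camelot

    # Named backend, resolved through BACKENDS; "ghostscript" emits a DeprecationWarning.
    tables = camelot.read_pdf("foo.pdf", backend="poppler")

    # Custom backend: anything with a convert(pdf_path, png_path) method passes the check.
    class NullBackend(object):  # illustrative name
        def convert(self, pdf_path, png_path):
            raise RuntimeError("plug in your own PDF-to-PNG conversion here")

    tables = camelot.read_pdf("foo.pdf", flavor="lattice", backend=NullBackend())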
@@ -1,243 +0,0 @@
# -*- coding: utf-8 -*-

import os
import copy
import logging
import subprocess

try:
    import easyocr
except ImportError:
    _HAS_EASYOCR = False
else:
    _HAS_EASYOCR = True

import pandas as pd
from PIL import Image

from .base import BaseParser
from ..core import Table
from ..utils import TemporaryDirectory, merge_close_lines, scale_pdf, segments_in_bbox
from ..image_processing import (
    adaptive_threshold,
    find_lines,
    find_contours,
    find_joints,
)


logger = logging.getLogger("camelot")


class LatticeOCR(BaseParser):
    def __init__(
        self,
        table_regions=None,
        table_areas=None,
        line_scale=15,
        line_tol=2,
        joint_tol=2,
        threshold_blocksize=15,
        threshold_constant=-2,
        iterations=0,
        resolution=300,
    ):
        self.table_regions = table_regions
        self.table_areas = table_areas
        self.line_scale = line_scale
        self.line_tol = line_tol
        self.joint_tol = joint_tol
        self.threshold_blocksize = threshold_blocksize
        self.threshold_constant = threshold_constant
        self.iterations = iterations
        self.resolution = resolution

        if _HAS_EASYOCR:
            self.reader = easyocr.Reader(['en'], gpu=False)
        else:
            raise ImportError("easyocr is required to run OCR on image-based PDFs.")

    def _generate_image(self):
        from ..ext.ghostscript import Ghostscript

        self.imagename = "".join([self.rootname, ".png"])
        gs_call = "-q -sDEVICE=png16m -o {} -r900 {}".format(
            self.imagename, self.filename
        )
        gs_call = gs_call.encode().split()
        null = open(os.devnull, "wb")
        with Ghostscript(*gs_call, stdout=null) as gs:
            pass
        null.close()

    def _generate_table_bbox(self):
        def scale_areas(areas, scalers):
            scaled_areas = []
            for area in areas:
                x1, y1, x2, y2 = area.split(",")
                x1 = float(x1)
                y1 = float(y1)
                x2 = float(x2)
                y2 = float(y2)
                x1, y1, x2, y2 = scale_pdf((x1, y1, x2, y2), scalers)
                scaled_areas.append((x1, y1, abs(x2 - x1), abs(y2 - y1)))
            return scaled_areas

        self.image, self.threshold = adaptive_threshold(
            self.imagename, blocksize=self.threshold_blocksize, c=self.threshold_constant
        )

        image_width = self.image.shape[1]
        image_height = self.image.shape[0]
        image_width_scaler = image_width / float(self.pdf_width)
        image_height_scaler = image_height / float(self.pdf_height)
        image_scalers = (image_width_scaler, image_height_scaler, self.pdf_height)

        if self.table_areas is None:
            regions = None
            if self.table_regions is not None:
                regions = scale_areas(self.table_regions, image_scalers)

            vertical_mask, vertical_segments = find_lines(
                self.threshold,
                regions=regions,
                direction="vertical",
                line_scale=self.line_scale,
                iterations=self.iterations,
            )
            horizontal_mask, horizontal_segments = find_lines(
                self.threshold,
                regions=regions,
                direction="horizontal",
                line_scale=self.line_scale,
                iterations=self.iterations,
            )

            contours = find_contours(vertical_mask, horizontal_mask)
            table_bbox = find_joints(contours, vertical_mask, horizontal_mask)
        else:
            vertical_mask, vertical_segments = find_lines(
                self.threshold,
                direction="vertical",
                line_scale=self.line_scale,
                iterations=self.iterations,
            )
            horizontal_mask, horizontal_segments = find_lines(
                self.threshold,
                direction="horizontal",
                line_scale=self.line_scale,
                iterations=self.iterations,
            )

            areas = scale_areas(self.table_areas, image_scalers)
            table_bbox = find_joints(areas, vertical_mask, horizontal_mask)

        self.table_bbox_unscaled = copy.deepcopy(table_bbox)

        self.table_bbox = table_bbox
        self.vertical_segments = vertical_segments
        self.horizontal_segments = horizontal_segments

    def _generate_columns_and_rows(self, table_idx, tk):
        cols, rows = zip(*self.table_bbox[tk])
        cols, rows = list(cols), list(rows)
        cols.extend([tk[0], tk[2]])
        rows.extend([tk[1], tk[3]])
        # sort horizontal and vertical segments
        cols = merge_close_lines(sorted(cols), line_tol=self.line_tol)
        rows = merge_close_lines(sorted(rows), line_tol=self.line_tol)
        # make grid using x and y coord of shortlisted rows and cols
        cols = [(cols[i], cols[i + 1]) for i in range(0, len(cols) - 1)]
        rows = [(rows[i], rows[i + 1]) for i in range(0, len(rows) - 1)]

        return cols, rows

    def _generate_table(self, table_idx, cols, rows, **kwargs):
        table = Table(cols, rows)
        # set table edges to True using ver+hor lines
        table = table.set_edges(self.vertical_segments, self.horizontal_segments, joint_tol=self.joint_tol)
        # set table border edges to True
        table = table.set_border()
        # set spanning cells to True
        table = table.set_span()

        _seen = set()
        for r_idx in range(len(table.cells)):
            for c_idx in range(len(table.cells[r_idx])):
                if (r_idx, c_idx) in _seen:
                    continue

                _seen.add((r_idx, c_idx))

                _r_idx = r_idx
                _c_idx = c_idx

                if table.cells[r_idx][_c_idx].hspan:
                    while not table.cells[r_idx][_c_idx].right:
                        _c_idx += 1
                        _seen.add((r_idx, _c_idx))

                if table.cells[_r_idx][c_idx].vspan:
                    while not table.cells[_r_idx][c_idx].bottom:
                        _r_idx += 1
                        _seen.add((_r_idx, c_idx))

                for i in range(r_idx, _r_idx + 1):
                    for j in range(c_idx, _c_idx + 1):
                        _seen.add((i, j))

                x1 = int(table.cells[r_idx][c_idx].x1)
                y1 = int(table.cells[_r_idx][_c_idx].y1)

                x2 = int(table.cells[_r_idx][_c_idx].x2)
                y2 = int(table.cells[r_idx][c_idx].y2)

                with TemporaryDirectory() as tempdir:
                    temp_image_path = os.path.join(tempdir, f"{table_idx}_{r_idx}_{c_idx}.png")

                    cell_image = Image.fromarray(self.image[y2:y1, x1:x2])
                    cell_image.save(temp_image_path)

                    text = self.reader.readtext(temp_image_path, detail=0)
                    text = " ".join(text)

                    table.cells[r_idx][c_idx].text = text

        data = table.data
        table.df = pd.DataFrame(data)
        table.shape = table.df.shape

        table.flavor = "lattice_ocr"
        table.accuracy = 0
        table.whitespace = 0
        table.order = table_idx + 1
        table.page = int(os.path.basename(self.rootname).replace("page-", ""))

        # for plotting
        table._text = None
        table._image = (self.image, self.table_bbox_unscaled)
        table._segments = (self.vertical_segments, self.horizontal_segments)
        table._textedges = None

        return table

    def extract_tables(self, filename, suppress_stdout=False, layout_kwargs={}):
        self._generate_layout(filename, layout_kwargs)
        if not suppress_stdout:
            logger.info("Processing {}".format(os.path.basename(self.rootname)))

        self._generate_image()
        self._generate_table_bbox()

        _tables = []
        # sort tables based on y-coord
        for table_idx, tk in enumerate(
            sorted(self.table_bbox.keys(), key=lambda x: x[1], reverse=True)
        ):
            cols, rows = self._generate_columns_and_rows(table_idx, tk)
            table = self._generate_table(table_idx, cols, rows)
            table._bbox = tk
            _tables.append(table)

        return _tables
@@ -65,7 +65,7 @@ class Stream(BaseParser):
        edge_tol=50,
        row_tol=2,
        column_tol=0,
        **kwargs
        **kwargs,
    ):
        self.table_regions = table_regions
        self.table_areas = table_areas

@@ -362,10 +362,10 @@ class Stream(BaseParser):
            if len(elements):
                ncols = max(set(elements), key=elements.count)
            else:
                warnings.warn(
                    f"No tables found in table area {table_idx + 1}"
                )
            cols = [(t.x0, t.x1) for r in rows_grouped if len(r) == ncols for t in r]
                warnings.warn(f"No tables found in table area {table_idx + 1}")
            cols = [
                (t.x0, t.x1) for r in rows_grouped if len(r) == ncols for t in r
            ]
            cols = self._merge_columns(sorted(cols), column_tol=self.column_tol)
            inner_text = []
            for i in range(1, len(cols)):
@@ -1,7 +0,0 @@
# -*- coding: utf-8 -*-

from .base import BaseParser


class StreamOCR(BaseParser):
    pass
@@ -34,13 +34,9 @@ class PlotMethods(object):
            raise ImportError("matplotlib is required for plotting.")

        if table.flavor == "lattice" and kind in ["textedge"]:
            raise NotImplementedError(
                f"Lattice flavor does not support kind='{kind}'"
            )
            raise NotImplementedError(f"Lattice flavor does not support kind='{kind}'")
        elif table.flavor == "stream" and kind in ["joint", "line"]:
            raise NotImplementedError(
                f"Stream flavor does not support kind='{kind}'"
            )
            raise NotImplementedError(f"Stream flavor does not support kind='{kind}'")

        plot_method = getattr(self, kind)
        fig = plot_method(table)
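The reworked checks only change how unsupported `kind` values are rejected per flavor. For context, a short hedged example of the plotting entry point these methods back (the file name is a placeholder and `matplotlib` must be installed):

    import camelot
    import matplotlib.pyplot as plt

    tables = camelot.read_pdf("foo.pdf", flavor="lattice")
    fig = camelot.plot(tables[0], kind="grid")  # kind="textedge" would raise for lattice
    plt.show()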
@@ -93,6 +93,7 @@ def download_url(url):
    return filepath


stream_kwargs = ["columns", "edge_tol", "row_tol", "column_tol"]
lattice_kwargs = [
    "process_background",
    "line_scale",

@@ -105,7 +106,6 @@ lattice_kwargs = [
    "iterations",
    "resolution",
]
stream_kwargs = ["columns", "edge_tol", "row_tol", "column_tol"]


def validate_input(kwargs, flavor="lattice"):

@@ -116,14 +116,14 @@ def validate_input(kwargs, flavor="lattice"):
            f"{','.join(sorted(isec))} cannot be used with flavor='{flavor}'"
        )

    if flavor in ["lattice", "lattice_ocr"]:
    if flavor == "lattice":
        check_intersection(stream_kwargs, kwargs)
    else:
        check_intersection(lattice_kwargs, kwargs)


def remove_extra(kwargs, flavor="lattice"):
    if flavor in ["lattice", "lattice_ocr"]:
    if flavor == "lattice":
        for key in kwargs.keys():
            if key in stream_kwargs:
                kwargs.pop(key)

@@ -838,23 +838,27 @@ def compute_whitespace(d):

def get_page_layout(
    filename,
    line_overlap=0.5,
    char_margin=1.0,
    line_margin=0.5,
    word_margin=0.1,
    boxes_flow=0.5,
    detect_vertical=True,
    all_texts=True,
):
    """Returns a PDFMiner LTPage object and page dimension of a single
    page pdf. See https://euske.github.io/pdfminer/ to get definitions
    of kwargs.
    page pdf. To get the definitions of kwargs, see
    https://pdfminersix.rtfd.io/en/latest/reference/composable.html.

    Parameters
    ----------
    filename : string
        Path to pdf file.
    line_overlap : float
    char_margin : float
    line_margin : float
    word_margin : float
    boxes_flow : float
    detect_vertical : bool
    all_texts : bool


@@ -870,11 +874,15 @@ def get_page_layout(
        parser = PDFParser(f)
        document = PDFDocument(parser)
        if not document.is_extractable:
            raise PDFTextExtractionNotAllowed(f"Text extraction is not allowed: {filename}")
            raise PDFTextExtractionNotAllowed(
                f"Text extraction is not allowed: {filename}"
            )
        laparams = LAParams(
            line_overlap=line_overlap,
            char_margin=char_margin,
            line_margin=line_margin,
            word_margin=word_margin,
            boxes_flow=boxes_flow,
            detect_vertical=detect_vertical,
            all_texts=all_texts,
        )
@@ -1,7 +1,19 @@
# flasky pygments style based on tango style
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
    Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
from pygments.token import (
    Keyword,
    Name,
    Comment,
    String,
    Error,
    Number,
    Operator,
    Generic,
    Whitespace,
    Punctuation,
    Other,
    Literal,
)


class FlaskyStyle(Style):

@@ -14,10 +26,8 @@ class FlaskyStyle(Style):
        Whitespace: "underline #f8f8f8",  # class: 'w'
        Error: "#a40000 border:#ef2929",  # class: 'err'
        Other: "#000000",  # class 'x'

        Comment: "italic #8f5902",  # class: 'c'
        Comment.Preproc: "noitalic",  # class: 'cp'

        Keyword: "bold #004461",  # class: 'k'
        Keyword.Constant: "bold #004461",  # class: 'kc'
        Keyword.Declaration: "bold #004461",  # class: 'kd'

@@ -25,12 +35,9 @@ class FlaskyStyle(Style):
        Keyword.Pseudo: "bold #004461",  # class: 'kp'
        Keyword.Reserved: "bold #004461",  # class: 'kr'
        Keyword.Type: "bold #004461",  # class: 'kt'

        Operator: "#582800",  # class: 'o'
        Operator.Word: "bold #004461",  # class: 'ow' - like keywords

        Punctuation: "bold #000000",  # class: 'p'

        # because special names such as Name.Class, Name.Function, etc.
        # are not recognized as such later in the parsing, we choose them
        # to look the same as ordinary variables.

@@ -53,12 +60,9 @@ class FlaskyStyle(Style):
        Name.Variable.Class: "#000000",  # class: 'vc' - to be revised
        Name.Variable.Global: "#000000",  # class: 'vg' - to be revised
        Name.Variable.Instance: "#000000",  # class: 'vi' - to be revised

        Number: "#990000",  # class: 'm'

        Literal: "#000000",  # class: 'l'
        Literal.Date: "#000000",  # class: 'ld'

        String: "#4e9a06",  # class: 's'
        String.Backtick: "#4e9a06",  # class: 'sb'
        String.Char: "#4e9a06",  # class: 'sc'

@@ -71,7 +75,6 @@ class FlaskyStyle(Style):
        String.Regex: "#4e9a06",  # class: 'sr'
        String.Single: "#4e9a06",  # class: 's1'
        String.Symbol: "#4e9a06",  # class: 'ss'

        Generic: "#000000",  # class: 'g'
        Generic.Deleted: "#a40000",  # class: 'gd'
        Generic.Emph: "italic #000000",  # class: 'ge'
docs/conf.py (96 changed lines)

@@ -22,8 +22,8 @@ import sys
# sys.path.insert(0, os.path.abspath('..'))

# Insert Camelot's path into the system.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('_themes'))
sys.path.insert(0, os.path.abspath(".."))
sys.path.insert(0, os.path.abspath("_themes"))

import camelot


@@ -38,33 +38,33 @@ import camelot
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.viewcode",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = u'Camelot'
copyright = u'2020, Camelot Developers'
author = u'Vinayak Mehta'
project = u"Camelot"
copyright = u"2021, Camelot Developers"
author = u"Vinayak Mehta"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

@@ -94,7 +94,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build']
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all
# documents.

@@ -114,7 +114,7 @@ add_module_names = True
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'flask_theme_support.FlaskyStyle'
pygments_style = "flask_theme_support.FlaskyStyle"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

@@ -130,18 +130,18 @@ todo_include_todos = True

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
html_theme = "alabaster"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'show_powered_by': False,
    'github_user': 'camelot-dev',
    'github_repo': 'camelot',
    'github_banner': True,
    'show_related': False,
    'note_bg': '#FFF59C'
    "show_powered_by": False,
    "github_user": "camelot-dev",
    "github_repo": "camelot",
    "github_banner": True,
    "show_related": False,
    "note_bg": "#FFF59C",
}

# Add any paths that contain custom themes here, relative to this directory.

@@ -164,12 +164,12 @@ html_theme_options = {
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
html_favicon = "_static/favicon.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied

@@ -189,10 +189,21 @@ html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'index': ['sidebarintro.html', 'relations.html', 'sourcelink.html',
              'searchbox.html', 'hacks.html'],
    '**': ['sidebarlogo.html', 'localtoc.html', 'relations.html',
           'sourcelink.html', 'searchbox.html', 'hacks.html']
    "index": [
        "sidebarintro.html",
        "relations.html",
        "sourcelink.html",
        "searchbox.html",
        "hacks.html",
    ],
    "**": [
        "sidebarlogo.html",
        "localtoc.html",
        "relations.html",
        "sourcelink.html",
        "searchbox.html",
        "hacks.html",
    ],
}

# Additional templates that should be rendered to pages, maps page names to

@@ -249,7 +260,7 @@ html_show_copyright = True
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'Camelotdoc'
htmlhelp_basename = "Camelotdoc"

# -- Options for LaTeX output ---------------------------------------------


@@ -257,15 +268,12 @@ latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',

@@ -275,8 +283,7 @@ latex_elements = {
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Camelot.tex', u'Camelot Documentation',
     u'Vinayak Mehta', 'manual'),
    (master_doc, "Camelot.tex", u"Camelot Documentation", u"Vinayak Mehta", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of

@@ -316,10 +323,7 @@ latex_documents = [

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'Camelot', u'Camelot Documentation',
     [author], 1)
]
man_pages = [(master_doc, "Camelot", u"Camelot Documentation", [author], 1)]

# If true, show URL addresses after external links.
#

@@ -332,9 +336,15 @@ man_pages = [
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Camelot', u'Camelot Documentation',
     author, 'Camelot', 'One line description of project.',
     'Miscellaneous'),
    (
        master_doc,
        "Camelot",
        u"Camelot Documentation",
        author,
        "Camelot",
        "One line description of project.",
        "Miscellaneous",
    ),
]

# Documents to append as an appendix to all manuals.

@@ -356,6 +366,6 @@ texinfo_documents = [

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'https://docs.python.org/2': None,
    'http://pandas.pydata.org/pandas-docs/stable': None
    "https://docs.python.org/2": None,
    "http://pandas.pydata.org/pandas-docs/stable": None,
}
@@ -54,7 +54,7 @@ Release v\ |version|. (:ref:`Installation <install>`)
    >>> tables = camelot.read_pdf('foo.pdf')
    >>> tables
    <TableList n=1>
    >>> tables.export('foo.csv', f='csv', compress=True) # json, excel, html
    >>> tables.export('foo.csv', f='csv', compress=True) # json, excel, html, markdown, sqlite
    >>> tables[0]
    <Table shape=(7, 7)>
    >>> tables[0].parsing_report

@@ -64,7 +64,7 @@ Release v\ |version|. (:ref:`Installation <install>`)
        'order': 1,
        'page': 1
    }
    >>> tables[0].to_csv('foo.csv') # to_json, to_excel, to_html
    >>> tables[0].to_csv('foo.csv') # to_json, to_excel, to_html, to_markdown, to_sqlite
    >>> tables[0].df # get a pandas DataFrame!

.. csv-table::

@@ -74,14 +74,16 @@ Camelot also comes packaged with a :ref:`command-line interface <cli>`!

.. note:: Camelot only works with text-based PDFs and not scanned documents. (As Tabula `explains`_, "If you can click and drag to select text in your table in a PDF viewer, then your PDF is text-based".)

   You can check out some frequently asked questions :ref:`here <faq>`.

.. _explains: https://github.com/tabulapdf/tabula#why-tabula

Why Camelot?
------------

- **Configurability**: Camelot gives you control over the table extraction process with its :ref:`tweakable settings <advanced>`.
- **Metrics**: Bad tables can be discarded based on metrics like accuracy and whitespace, without having to manually look at each table.
- **Output**: Each table is extracted into a **pandas DataFrame**, which seamlessly integrates into `ETL and data analysis workflows`_. You can also export tables to multiple formats, which include CSV, JSON, Excel, HTML and Sqlite.
- **Configurability**: Camelot gives you control over the table extraction process with :ref:`tweakable settings <advanced>`.
- **Metrics**: You can discard bad tables based on metrics like accuracy and whitespace, without having to manually look at each table.
- **Output**: Each table is extracted into a **pandas DataFrame**, which seamlessly integrates into `ETL and data analysis workflows`_. You can also export tables to multiple formats, which include CSV, JSON, Excel, HTML, Markdown, and Sqlite.

.. _ETL and data analysis workflows: https://gist.github.com/vinayak-mehta/e5949f7c2410a0e12f25d3682dc9e873

@@ -110,6 +112,7 @@ This part of the documentation begins with some background information about why
   user/how-it-works
   user/quickstart
   user/advanced
   user/faq
   user/cli

The API Documentation/Guide
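Since the snippets above now advertise Markdown and SQLite output alongside the existing formats, here is a short hedged example of those two exporters; the file names are placeholders and the path arguments are assumed to mirror ``to_csv``:

    >>> import camelot
    >>> tables = camelot.read_pdf('foo.pdf')
    >>> tables[0].to_markdown('foo.md')          # per the to_markdown method referenced above
    >>> tables.export('foo.sqlite', f='sqlite')  # per the f='sqlite' option referenced above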
@@ -618,8 +618,31 @@ Tweak layout generation

Camelot is built on top of PDFMiner's functionality of grouping characters on a page into words and sentences. In some cases (such as `#170 <https://github.com/camelot-dev/camelot/issues/170>`_ and `#215 <https://github.com/camelot-dev/camelot/issues/215>`_), PDFMiner can group characters that should belong to the same sentence into separate sentences.

To deal with such cases, you can tweak PDFMiner's `LAParams kwargs <https://github.com/euske/pdfminer/blob/master/pdfminer/layout.py#L33>`_ to improve layout generation, by passing the keyword arguments as a dict using ``layout_kwargs`` in :meth:`read_pdf() <camelot.read_pdf>`. To know more about the parameters you can tweak, you can check out `PDFMiner docs <https://euske.github.io/pdfminer/>`_.
To deal with such cases, you can tweak PDFMiner's `LAParams kwargs <https://github.com/euske/pdfminer/blob/master/pdfminer/layout.py#L33>`_ to improve layout generation, by passing the keyword arguments as a dict using ``layout_kwargs`` in :meth:`read_pdf() <camelot.read_pdf>`. To know more about the parameters you can tweak, you can check out `PDFMiner docs <https://pdfminersix.rtfd.io/en/latest/reference/composable.html>`_.

::

    >>> tables = camelot.read_pdf('foo.pdf', layout_kwargs={'detect_vertical': False})

.. _image-conversion-backend:

Use alternate image conversion backends
---------------------------------------

When using the :ref:`Lattice <lattice>` flavor, Camelot uses ``ghostscript`` to convert PDF pages to images for line recognition. If you face installation issues with ``ghostscript``, you can use an alternate image conversion backend called ``poppler``. You can specify which image conversion backend you want to use with::

    >>> tables = camelot.read_pdf(filename, backend="ghostscript") # default
    >>> tables = camelot.read_pdf(filename, backend="poppler")

.. note:: ``ghostscript`` will be replaced by ``poppler`` as the default image conversion backend in ``v0.12.0``.

If you face issues with both ``ghostscript`` and ``poppler``, you can supply your own image conversion backend::

    >>> class ConversionBackend(object):
    >>>     def convert(pdf_path, png_path):
    >>>         # read pdf page from pdf_path
    >>>         # convert pdf page to image
    >>>         # write image to png_path
    >>>         pass
    >>>
    >>> tables = camelot.read_pdf(filename, backend=ConversionBackend())
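One concrete way to fill in that ``ConversionBackend`` skeleton, sketched here under the assumption that the ``pdftoppm`` command-line tool from poppler-utils is installed; the class name, DPI, and command invocation are illustrative, not part of the documented API::

    >>> import os
    >>> import subprocess
    >>>
    >>> class PdftoppmBackend(object):  # illustrative name
    >>>     def convert(self, pdf_path, png_path):
    >>>         # pdftoppm -png -r 300 -singlefile writes <prefix>.png for the first page
    >>>         prefix = os.path.splitext(png_path)[0]
    >>>         subprocess.run(
    >>>             ["pdftoppm", "-png", "-r", "300", "-singlefile", pdf_path, prefix],
    >>>             check=True,
    >>>         )
    >>>
    >>> tables = camelot.read_pdf(filename, backend=PdftoppmBackend())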
@@ -0,0 +1,70 @@
.. _faq:

Frequently Asked Questions
==========================

This part of the documentation answers some common questions. To add questions, please open an issue `here <https://github.com/camelot-dev/camelot/issues/new>`_.

Does Camelot work with image-based PDFs?
----------------------------------------

**No**, Camelot only works with text-based PDFs and not scanned documents. (As Tabula `explains <https://github.com/tabulapdf/tabula#why-tabula>`_, "If you can click and drag to select text in your table in a PDF viewer, then your PDF is text-based".)

How to reduce memory usage for long PDFs?
-----------------------------------------

During table extraction from long PDF documents, RAM usage can grow significantly.

A simple workaround is to divide the extraction into chunks, and save extracted data to disk at the end of every chunk.

For more details, check out this code snippet from `@anakin87 <https://github.com/anakin87>`_:

::

    import camelot


    def chunks(l, n):
        """Yield successive n-sized chunks from l."""
        for i in range(0, len(l), n):
            yield l[i : i + n]


    def extract_tables(filepath, pages, chunks=50, export_path=".", params={}):
        """
        Divide the extraction work into n chunks. At the end of every chunk,
        save data on disk and free RAM.

        filepath : str
            Filepath or URL of the PDF file.
        pages : str, optional (default: '1')
            Comma-separated page numbers.
            Example: '1,3,4' or '1,4-end' or 'all'.
        """

        # get list of pages from camelot.handlers.PDFHandler
        handler = camelot.handlers.PDFHandler(filepath)
        page_list = handler._get_pages(filepath, pages=pages)

        # chunk pages list
        page_chunks = list(chunks(page_list, chunks))

        # extraction and export
        for chunk in page_chunks:
            pages_string = str(chunk).replace("[", "").replace("]", "")
            tables = camelot.read_pdf(filepath, pages=pages_string, **params)
            tables.export(f"{export_path}/tables.csv")

How can I supply my own image conversion backend to Lattice?
------------------------------------------------------------

When using the :ref:`Lattice <lattice>` flavor, you can supply your own :ref:`image conversion backend <image-conversion-backend>` by creating a class with a ``convert`` method as follows::

    >>> class ConversionBackend(object):
    >>>     def convert(pdf_path, png_path):
    >>>         # read pdf page from pdf_path
    >>>         # convert pdf page to image
    >>>         # write image to png_path
    >>>         pass
    >>>
    >>> tables = camelot.read_pdf(filename, backend=ConversionBackend())
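For reference, the chunked helper defined in that FAQ entry would be driven like this; the file name, chunk size, and output directory below are placeholders, not values from the FAQ:

    >>> extract_tables("foo.pdf", pages="all", chunks=20, export_path="out")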
@@ -43,8 +43,9 @@ For Ubuntu/MacOS::

For Windows::

    >>> import ctypes
    >>> from ctypes.util import find_library
    >>> find_library("".join(("gsdll", str(ctypes.sizeof(ctypes.c_voidp) * 8), ".dll"))
    >>> find_library("".join(("gsdll", str(ctypes.sizeof(ctypes.c_voidp) * 8), ".dll")))
    <name-of-ghostscript-library-on-windows>

**Check:** The output of the ``find_library`` function should not be empty.
@@ -14,7 +14,7 @@ pip

To install Camelot from PyPI using ``pip``, please include the extra ``cv`` requirement as shown::

    $ pip install "camelot-py[cv]"
    $ pip install "camelot-py[base]"

conda
-----

@@ -37,4 +37,4 @@ After :ref:`installing the dependencies <install_deps>`, you can install Camelot

::

    $ cd camelot
    $ pip install ".[cv]"
    $ pip install ".[base]"
@@ -56,7 +56,7 @@ Woah! The accuracy is top-notch and there is less whitespace, which means the ta
.. csv-table::
    :file: ../_static/csv/foo.csv

Looks good! You can now export the table as a CSV file using its :meth:`to_csv() <camelot.core.Table.to_csv>` method. Alternatively you can use :meth:`to_json() <camelot.core.Table.to_json>`, :meth:`to_excel() <camelot.core.Table.to_excel>` :meth:`to_html() <camelot.core.Table.to_html>` or :meth:`to_sqlite() <camelot.core.Table.to_sqlite>` methods to export the table as JSON, Excel, HTML files or a sqlite database respectively.
Looks good! You can now export the table as a CSV file using its :meth:`to_csv() <camelot.core.Table.to_csv>` method. Alternatively you can use :meth:`to_json() <camelot.core.Table.to_json>`, :meth:`to_excel() <camelot.core.Table.to_excel>` :meth:`to_html() <camelot.core.Table.to_html>` :meth:`to_markdown() <camelot.core.Table.to_markdown>` or :meth:`to_sqlite() <camelot.core.Table.to_sqlite>` methods to export the table as JSON, Excel, HTML files or a sqlite database respectively.

::


@@ -76,7 +76,7 @@ You can also export all tables at once, using the :class:`tables <camelot.core.T

    $ camelot --format csv --output foo.csv lattice foo.pdf

This will export all tables as CSV files at the path specified. Alternatively, you can use ``f='json'``, ``f='excel'``, ``f='html'`` or ``f='sqlite'``.
This will export all tables as CSV files at the path specified. Alternatively, you can use ``f='json'``, ``f='excel'``, ``f='html'``, ``f='markdown'`` or ``f='sqlite'``.

.. note:: The :meth:`export() <camelot.core.TableList.export>` method exports files with a ``page-*-table-*`` suffix. In the example above, the single table in the list will be exported to ``foo-page-1-table-1.csv``. If the list contains multiple tables, multiple CSV files will be created. To avoid filling up your path with multiple files, you can use ``compress=True``, which will create a single ZIP file at your path with all the CSV files.
88
setup.py
|
|
@ -6,80 +6,78 @@ from setuptools import find_packages
|
|||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
about = {}
|
||||
with open(os.path.join(here, 'camelot', '__version__.py'), 'r') as f:
|
||||
with open(os.path.join(here, "camelot", "__version__.py"), "r") as f:
|
||||
exec(f.read(), about)
|
||||
|
||||
with open('README.md', 'r') as f:
|
||||
with open("README.md", "r") as f:
|
||||
readme = f.read()
|
||||
|
||||
|
||||
requires = [
|
||||
'chardet>=3.0.4',
|
||||
'click>=6.7',
|
||||
'numpy>=1.13.3',
|
||||
'openpyxl>=2.5.8',
|
||||
'pandas>=0.23.4',
|
||||
'pdfminer.six>=20200726',
|
||||
'PyPDF2>=1.26.0'
|
||||
"chardet>=3.0.4",
|
||||
"click>=6.7",
|
||||
"numpy>=1.13.3",
|
||||
"openpyxl>=2.5.8",
|
||||
"pandas>=0.23.4",
|
||||
"pdfminer.six>=20200726",
|
||||
"PyPDF2>=1.26.0",
|
||||
"tabulate>=0.8.9",
|
||||
]
|
||||
|
||||
cv_requires = [
|
||||
'opencv-python>=3.4.2.17'
|
||||
]
|
||||
|
||||
ocr_requires = [
|
||||
'easyocr>=1.1.10'
|
||||
]
|
||||
base_requires = ["ghostscript>=0.7", "opencv-python>=3.4.2.17", "pdftopng>=0.2.3"]
|
||||
|
||||
plot_requires = [
|
||||
'matplotlib>=2.2.3',
|
||||
"matplotlib>=2.2.3",
|
||||
]
|
||||
|
||||
dev_requires = [
|
||||
'codecov>=2.0.15',
|
||||
'pytest>=5.4.3',
|
||||
'pytest-cov>=2.10.0',
|
||||
'pytest-mpl>=0.11',
|
||||
'pytest-runner>=5.2',
|
||||
'Sphinx>=3.1.2'
|
||||
"codecov>=2.0.15",
|
||||
"pytest>=5.4.3",
|
||||
"pytest-cov>=2.10.0",
|
||||
"pytest-mpl>=0.11",
|
||||
"pytest-runner>=5.2",
|
||||
"Sphinx>=3.1.2",
|
||||
"sphinx-autobuild>=2021.3.14",
|
||||
]
|
||||
|
||||
all_requires = cv_requires + ocr_requires + plot_requires
|
||||
all_requires = base_requires + plot_requires
|
||||
dev_requires = dev_requires + all_requires
|
||||
|
||||
|
||||
def setup_package():
|
||||
metadata = dict(name=about['__title__'],
|
||||
version=about['__version__'],
|
||||
description=about['__description__'],
|
||||
metadata = dict(
|
||||
name=about["__title__"],
|
||||
version=about["__version__"],
|
||||
description=about["__description__"],
|
||||
long_description=readme,
|
||||
long_description_content_type="text/markdown",
|
||||
url=about['__url__'],
|
||||
author=about['__author__'],
|
||||
author_email=about['__author_email__'],
|
||||
license=about['__license__'],
|
||||
packages=find_packages(exclude=('tests',)),
|
||||
url=about["__url__"],
|
||||
author=about["__author__"],
|
||||
author_email=about["__author_email__"],
|
||||
license=about["__license__"],
|
||||
packages=find_packages(exclude=("tests",)),
|
||||
install_requires=requires,
|
||||
extras_require={
|
||||
'cv': cv_requires,
|
||||
'ocr': ocr_requires,
|
||||
'plot': plot_requires,
|
||||
'all': all_requires,
|
||||
'dev': dev_requires,
|
||||
"all": all_requires,
|
||||
"base": base_requires,
|
||||
"cv": base_requires, # deprecate
|
||||
"dev": dev_requires,
|
||||
"plot": plot_requires,
|
||||
},
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'camelot = camelot.cli:cli',
|
||||
"console_scripts": [
|
||||
"camelot = camelot.cli:cli",
|
||||
],
|
||||
},
|
||||
classifiers=[
|
||||
# Trove classifiers
|
||||
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
'License :: OSI Approved :: MIT License',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8'
|
||||
])
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
],
|
||||
)
|
||||
|
||||
try:
|
||||
from setuptools import setup
|
||||
|
|
@ -89,5 +87,5 @@ def setup_package():
|
|||
setup(**metadata)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
setup_package()
|
||||
|
|
|
|||
508
tests/data.py
|
|
@ -2800,49 +2800,467 @@ data_stream_layout_kwargs = [
|
|||
]
|
||||
|
||||
data_stream_duplicated_text = [
|
||||
['', '2012 BETTER VARIETIES Harvest Report for Minnesota Central [ MNCE ]', '', '', '', '', '', '', '', '',
|
||||
'ALL SEASON TEST'],
|
||||
['', 'Doug Toreen, Renville County, MN 55310 [ BIRD ISLAND ]', '', '', '', '', '', '', '', '',
|
||||
'1.3 - 2.0 MAT. GROUP'],
|
||||
['PREV. CROP/HERB:', 'Corn / Surpass, Roundup', '', '', '', '', '', '', '', '', 'S2MNCE01'],
|
||||
['SOIL DESCRIPTION:', '', 'Canisteo clay loam, mod. well drained, non-irrigated', '', '', '', '', '', '', '', ''],
|
||||
['SOIL CONDITIONS:', '', 'High P, high K, 6.7 pH, 3.9% OM, Low SCN', '', '', '', '', '', '', '', '30" ROW SPACING'],
|
||||
['TILLAGE/CULTIVATION:', 'conventional w/ fall till', '', '', '', '', '', '', '', '', ''],
|
||||
['PEST MANAGEMENT:', 'Roundup twice', '', '', '', '', '', '', '', '', ''],
|
||||
['SEEDED - RATE:', 'May 15', '140 000 /A', '', '', '', '', '', '', 'TOP 30 for YIELD of 63 TESTED', ''],
|
||||
['HARVESTED - STAND:', 'Oct 3', '122 921 /A', '', '', '', '', '', '', 'AVERAGE of (3) REPLICATIONS', ''],
|
||||
['', '', '', '', 'SCN', 'Seed', 'Yield', 'Moisture', 'Lodging', 'Stand', 'Gross'],
|
||||
['Company/Brand', 'Product/Brand†', 'Technol.†', 'Mat.', 'Resist.', 'Trmt.†', 'Bu/A', '%', '%', '(x 1000)',
|
||||
'Income'], ['Kruger', 'K2 1901', 'RR2Y', '1.9', 'R', 'Ac,PV', '56.4', '7.6', '0', '126.3', '$846'],
|
||||
['Stine', '19RA02 §', 'RR2Y', '1.9', 'R', 'CMB', '55.3', '7.6', '0', '120.0', '$830'],
|
||||
['Wensman', 'W 3190NR2', 'RR2Y', '1.9', 'R', 'Ac', '54.5', '7.6', '0', '119.5', '$818'],
|
||||
['Hefty', 'H17Y12', 'RR2Y', '1.7', 'MR', 'I', '53.7', '7.7', '0', '124.4', '$806'],
|
||||
['Dyna-Gro', 'S15RY53', 'RR2Y', '1.5', 'R', 'Ac', '53.6', '7.7', '0', '126.8', '$804'],
|
||||
['LG Seeds', 'C2050R2', 'RR2Y', '2.1', 'R', 'Ac', '53.6', '7.7', '0', '123.9', '$804'],
|
||||
['Titan Pro', '19M42', 'RR2Y', '1.9', 'R', 'CMB', '53.6', '7.7', '0', '121.0', '$804'],
|
||||
['Stine', '19RA02 (2) §', 'RR2Y', '1.9', 'R', 'CMB', '53.4', '7.7', '0', '123.9', '$801'],
|
||||
['Asgrow', 'AG1832 §', 'RR2Y', '1.8', 'MR', 'Ac,PV', '52.9', '7.7', '0', '122.0', '$794'],
|
||||
['Prairie Brand', 'PB-1566R2', 'RR2Y', '1.5', 'R', 'CMB', '52.8', '7.7', '0', '122.9', '$792'],
|
||||
['Channel', '1901R2', 'RR2Y', '1.9', 'R', 'Ac,PV', '52.8', '7.6', '0', '123.4', '$791'],
|
||||
['Titan Pro', '20M1', 'RR2Y', '2.0', 'R', 'Am', '52.5', '7.5', '0', '124.4', '$788'],
|
||||
['Kruger', 'K2-2002', 'RR2Y', '2.0', 'R', 'Ac,PV', '52.4', '7.9', '0', '125.4', '$786'],
|
||||
['Channel', '1700R2', 'RR2Y', '1.7', 'R', 'Ac,PV', '52.3', '7.9', '0', '123.9', '$784'],
|
||||
['Hefty', 'H16Y11', 'RR2Y', '1.6', 'MR', 'I', '51.4', '7.6', '0', '123.9', '$771'],
|
||||
['Anderson', '162R2Y', 'RR2Y', '1.6', 'R', 'None', '51.3', '7.5', '0', '119.5', '$770'],
|
||||
['Titan Pro', '15M22', 'RR2Y', '1.5', 'R', 'CMB', '51.3', '7.8', '0', '125.4', '$769'],
|
||||
['Dairyland', 'DSR-1710R2Y', 'RR2Y', '1.7', 'R', 'CMB', '51.3', '7.7', '0', '122.0', '$769'],
|
||||
['Hefty', 'H20R3', 'RR2Y', '2.0', 'MR', 'I', '50.5', '8.2', '0', '121.0', '$757'],
|
||||
['Prairie Brand', 'PB 1743R2', 'RR2Y', '1.7', 'R', 'CMB', '50.2', '7.7', '0', '125.8', '$752'],
|
||||
['Gold Country', '1741', 'RR2Y', '1.7', 'R', 'Ac', '50.1', '7.8', '0', '123.9', '$751'],
|
||||
['Trelay', '20RR43', 'RR2Y', '2.0', 'R', 'Ac,Ex', '49.9', '7.6', '0', '127.8', '$749'],
|
||||
['Hefty', 'H14R3', 'RR2Y', '1.4', 'MR', 'I', '49.7', '7.7', '0', '122.9', '$746'],
|
||||
['Prairie Brand', 'PB-2099NRR2', 'RR2Y', '2.0', 'R', 'CMB', '49.6', '7.8', '0', '126.3', '$743'],
|
||||
['Wensman', 'W 3174NR2', 'RR2Y', '1.7', 'R', 'Ac', '49.3', '7.6', '0', '122.5', '$740'],
|
||||
['Kruger', 'K2 1602', 'RR2Y', '1.6', 'R', 'Ac,PV', '48.7', '7.6', '0', '125.4', '$731'],
|
||||
['NK Brand', 'S18-C2 §', 'RR2Y', '1.8', 'R', 'CMB', '48.7', '7.7', '0', '126.8', '$731'],
|
||||
['Kruger', 'K2 1902', 'RR2Y', '1.9', 'R', 'Ac,PV', '48.7', '7.5', '0', '124.4', '$730'],
|
||||
['Prairie Brand', 'PB-1823R2', 'RR2Y', '1.8', 'R', 'None', '48.5', '7.6', '0', '121.0', '$727'],
|
||||
['Gold Country', '1541', 'RR2Y', '1.5', 'R', 'Ac', '48.4', '7.6', '0', '110.4', '$726'],
|
||||
['', '', '', '', '', 'Test Average =', '47.6', '7.7', '0', '122.9', '$713'],
|
||||
['', '', '', '', '', 'LSD (0.10) =', '5.7', '0.3', 'ns', '37.8', '566.4']
|
||||
[
|
||||
"",
|
||||
"2012 BETTER VARIETIES Harvest Report for Minnesota Central [ MNCE ]",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"ALL SEASON TEST",
|
||||
],
|
||||
[
|
||||
"",
|
||||
"Doug Toreen, Renville County, MN 55310 [ BIRD ISLAND ]",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"1.3 - 2.0 MAT. GROUP",
|
||||
],
|
||||
[
|
||||
"PREV. CROP/HERB:",
|
||||
"Corn / Surpass, Roundup",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"S2MNCE01",
|
||||
],
|
||||
[
|
||||
"SOIL DESCRIPTION:",
|
||||
"",
|
||||
"Canisteo clay loam, mod. well drained, non-irrigated",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
],
|
||||
[
|
||||
"SOIL CONDITIONS:",
|
||||
"",
|
||||
"High P, high K, 6.7 pH, 3.9% OM, Low SCN",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
'30" ROW SPACING',
|
||||
],
|
||||
[
|
||||
"TILLAGE/CULTIVATION:",
|
||||
"conventional w/ fall till",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
],
|
||||
["PEST MANAGEMENT:", "Roundup twice", "", "", "", "", "", "", "", "", ""],
|
||||
[
|
||||
"SEEDED - RATE:",
|
||||
"May 15",
|
||||
"140 000 /A",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"TOP 30 for YIELD of 63 TESTED",
|
||||
"",
|
||||
],
|
||||
[
|
||||
"HARVESTED - STAND:",
|
||||
"Oct 3",
|
||||
"122 921 /A",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"AVERAGE of (3) REPLICATIONS",
|
||||
"",
|
||||
],
|
||||
["", "", "", "", "SCN", "Seed", "Yield", "Moisture", "Lodging", "Stand", "Gross"],
|
||||
[
|
||||
"Company/Brand",
|
||||
"Product/Brand†",
|
||||
"Technol.†",
|
||||
"Mat.",
|
||||
"Resist.",
|
||||
"Trmt.†",
|
||||
"Bu/A",
|
||||
"%",
|
||||
"%",
|
||||
"(x 1000)",
|
||||
"Income",
|
||||
],
|
||||
[
|
||||
"Kruger",
|
||||
"K2 1901",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"56.4",
|
||||
"7.6",
|
||||
"0",
|
||||
"126.3",
|
||||
"$846",
|
||||
],
|
||||
[
|
||||
"Stine",
|
||||
"19RA02 §",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"CMB",
|
||||
"55.3",
|
||||
"7.6",
|
||||
"0",
|
||||
"120.0",
|
||||
"$830",
|
||||
],
|
||||
[
|
||||
"Wensman",
|
||||
"W 3190NR2",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"Ac",
|
||||
"54.5",
|
||||
"7.6",
|
||||
"0",
|
||||
"119.5",
|
||||
"$818",
|
||||
],
|
||||
["Hefty", "H17Y12", "RR2Y", "1.7", "MR", "I", "53.7", "7.7", "0", "124.4", "$806"],
|
||||
[
|
||||
"Dyna-Gro",
|
||||
"S15RY53",
|
||||
"RR2Y",
|
||||
"1.5",
|
||||
"R",
|
||||
"Ac",
|
||||
"53.6",
|
||||
"7.7",
|
||||
"0",
|
||||
"126.8",
|
||||
"$804",
|
||||
],
|
||||
[
|
||||
"LG Seeds",
|
||||
"C2050R2",
|
||||
"RR2Y",
|
||||
"2.1",
|
||||
"R",
|
||||
"Ac",
|
||||
"53.6",
|
||||
"7.7",
|
||||
"0",
|
||||
"123.9",
|
||||
"$804",
|
||||
],
|
||||
[
|
||||
"Titan Pro",
|
||||
"19M42",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"CMB",
|
||||
"53.6",
|
||||
"7.7",
|
||||
"0",
|
||||
"121.0",
|
||||
"$804",
|
||||
],
|
||||
[
|
||||
"Stine",
|
||||
"19RA02 (2) §",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"CMB",
|
||||
"53.4",
|
||||
"7.7",
|
||||
"0",
|
||||
"123.9",
|
||||
"$801",
|
||||
],
|
||||
[
|
||||
"Asgrow",
|
||||
"AG1832 §",
|
||||
"RR2Y",
|
||||
"1.8",
|
||||
"MR",
|
||||
"Ac,PV",
|
||||
"52.9",
|
||||
"7.7",
|
||||
"0",
|
||||
"122.0",
|
||||
"$794",
|
||||
],
|
||||
[
|
||||
"Prairie Brand",
|
||||
"PB-1566R2",
|
||||
"RR2Y",
|
||||
"1.5",
|
||||
"R",
|
||||
"CMB",
|
||||
"52.8",
|
||||
"7.7",
|
||||
"0",
|
||||
"122.9",
|
||||
"$792",
|
||||
],
|
||||
[
|
||||
"Channel",
|
||||
"1901R2",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"52.8",
|
||||
"7.6",
|
||||
"0",
|
||||
"123.4",
|
||||
"$791",
|
||||
],
|
||||
[
|
||||
"Titan Pro",
|
||||
"20M1",
|
||||
"RR2Y",
|
||||
"2.0",
|
||||
"R",
|
||||
"Am",
|
||||
"52.5",
|
||||
"7.5",
|
||||
"0",
|
||||
"124.4",
|
||||
"$788",
|
||||
],
|
||||
[
|
||||
"Kruger",
|
||||
"K2-2002",
|
||||
"RR2Y",
|
||||
"2.0",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"52.4",
|
||||
"7.9",
|
||||
"0",
|
||||
"125.4",
|
||||
"$786",
|
||||
],
|
||||
[
|
||||
"Channel",
|
||||
"1700R2",
|
||||
"RR2Y",
|
||||
"1.7",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"52.3",
|
||||
"7.9",
|
||||
"0",
|
||||
"123.9",
|
||||
"$784",
|
||||
],
|
||||
["Hefty", "H16Y11", "RR2Y", "1.6", "MR", "I", "51.4", "7.6", "0", "123.9", "$771"],
|
||||
[
|
||||
"Anderson",
|
||||
"162R2Y",
|
||||
"RR2Y",
|
||||
"1.6",
|
||||
"R",
|
||||
"None",
|
||||
"51.3",
|
||||
"7.5",
|
||||
"0",
|
||||
"119.5",
|
||||
"$770",
|
||||
],
|
||||
[
|
||||
"Titan Pro",
|
||||
"15M22",
|
||||
"RR2Y",
|
||||
"1.5",
|
||||
"R",
|
||||
"CMB",
|
||||
"51.3",
|
||||
"7.8",
|
||||
"0",
|
||||
"125.4",
|
||||
"$769",
|
||||
],
|
||||
[
|
||||
"Dairyland",
|
||||
"DSR-1710R2Y",
|
||||
"RR2Y",
|
||||
"1.7",
|
||||
"R",
|
||||
"CMB",
|
||||
"51.3",
|
||||
"7.7",
|
||||
"0",
|
||||
"122.0",
|
||||
"$769",
|
||||
],
|
||||
["Hefty", "H20R3", "RR2Y", "2.0", "MR", "I", "50.5", "8.2", "0", "121.0", "$757"],
|
||||
[
|
||||
"Prairie Brand",
|
||||
"PB 1743R2",
|
||||
"RR2Y",
|
||||
"1.7",
|
||||
"R",
|
||||
"CMB",
|
||||
"50.2",
|
||||
"7.7",
|
||||
"0",
|
||||
"125.8",
|
||||
"$752",
|
||||
],
|
||||
[
|
||||
"Gold Country",
|
||||
"1741",
|
||||
"RR2Y",
|
||||
"1.7",
|
||||
"R",
|
||||
"Ac",
|
||||
"50.1",
|
||||
"7.8",
|
||||
"0",
|
||||
"123.9",
|
||||
"$751",
|
||||
],
|
||||
[
|
||||
"Trelay",
|
||||
"20RR43",
|
||||
"RR2Y",
|
||||
"2.0",
|
||||
"R",
|
||||
"Ac,Ex",
|
||||
"49.9",
|
||||
"7.6",
|
||||
"0",
|
||||
"127.8",
|
||||
"$749",
|
||||
],
|
||||
["Hefty", "H14R3", "RR2Y", "1.4", "MR", "I", "49.7", "7.7", "0", "122.9", "$746"],
|
||||
[
|
||||
"Prairie Brand",
|
||||
"PB-2099NRR2",
|
||||
"RR2Y",
|
||||
"2.0",
|
||||
"R",
|
||||
"CMB",
|
||||
"49.6",
|
||||
"7.8",
|
||||
"0",
|
||||
"126.3",
|
||||
"$743",
|
||||
],
|
||||
[
|
||||
"Wensman",
|
||||
"W 3174NR2",
|
||||
"RR2Y",
|
||||
"1.7",
|
||||
"R",
|
||||
"Ac",
|
||||
"49.3",
|
||||
"7.6",
|
||||
"0",
|
||||
"122.5",
|
||||
"$740",
|
||||
],
|
||||
[
|
||||
"Kruger",
|
||||
"K2 1602",
|
||||
"RR2Y",
|
||||
"1.6",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"48.7",
|
||||
"7.6",
|
||||
"0",
|
||||
"125.4",
|
||||
"$731",
|
||||
],
|
||||
[
|
||||
"NK Brand",
|
||||
"S18-C2 §",
|
||||
"RR2Y",
|
||||
"1.8",
|
||||
"R",
|
||||
"CMB",
|
||||
"48.7",
|
||||
"7.7",
|
||||
"0",
|
||||
"126.8",
|
||||
"$731",
|
||||
],
|
||||
[
|
||||
"Kruger",
|
||||
"K2 1902",
|
||||
"RR2Y",
|
||||
"1.9",
|
||||
"R",
|
||||
"Ac,PV",
|
||||
"48.7",
|
||||
"7.5",
|
||||
"0",
|
||||
"124.4",
|
||||
"$730",
|
||||
],
|
||||
[
|
||||
"Prairie Brand",
|
||||
"PB-1823R2",
|
||||
"RR2Y",
|
||||
"1.8",
|
||||
"R",
|
||||
"None",
|
||||
"48.5",
|
||||
"7.6",
|
||||
"0",
|
||||
"121.0",
|
||||
"$727",
|
||||
],
|
||||
[
|
||||
"Gold Country",
|
||||
"1541",
|
||||
"RR2Y",
|
||||
"1.5",
|
||||
"R",
|
||||
"Ac",
|
||||
"48.4",
|
||||
"7.6",
|
||||
"0",
|
||||
"110.4",
|
||||
"$726",
|
||||
],
|
||||
["", "", "", "", "", "Test Average =", "47.6", "7.7", "0", "122.9", "$713"],
|
||||
["", "", "", "", "", "LSD (0.10) =", "5.7", "0.3", "ns", "37.8", "566.4"],
|
||||
]
|
||||
|
|
|
|||
|
Before Width: | Height: | Size: 8.2 KiB After Width: | Height: | Size: 8.2 KiB |
|
After Width: | Height: | Size: 8.1 KiB |
|
Before Width: | Height: | Size: 48 KiB After Width: | Height: | Size: 48 KiB |
|
After Width: | Height: | Size: 66 KiB |
|
Before Width: | Height: | Size: 46 KiB After Width: | Height: | Size: 46 KiB |
|
After Width: | Height: | Size: 64 KiB |
|
Before Width: | Height: | Size: 6.7 KiB After Width: | Height: | Size: 6.7 KiB |
|
After Width: | Height: | Size: 6.6 KiB |
|
|
@ -1,7 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from camelot.cli import cli
|
||||
|
|
@ -11,6 +13,11 @@ from camelot.utils import TemporaryDirectory
|
|||
testdir = os.path.dirname(os.path.abspath(__file__))
|
||||
testdir = os.path.join(testdir, "files")
|
||||
|
||||
skip_on_windows = pytest.mark.skipif(
|
||||
sys.platform.startswith("win"),
|
||||
reason="Ghostscript not installed in Windows test environment",
|
||||
)
|
||||
|
||||
|
||||
def test_help_output():
|
||||
runner = CliRunner()
|
||||
|
|
@ -26,6 +33,7 @@ def test_help_output():
|
|||
)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_cli_lattice():
|
||||
with TemporaryDirectory() as tempdir:
|
||||
infile = os.path.join(testdir, "foo.pdf")
|
||||
|
|
@ -35,7 +43,7 @@ def test_cli_lattice():
|
|||
cli, ["--format", "csv", "--output", outfile, "lattice", infile]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert result.output == "Found 1 tables\n"
|
||||
assert "Found 1 tables" in result.output
|
||||
|
||||
result = runner.invoke(cli, ["--format", "csv", "lattice", infile])
|
||||
output_error = "Error: Please specify output file path using --output"
|
||||
|
|
@ -123,7 +131,7 @@ def test_cli_output_format():
|
|||
cli,
|
||||
["--format", "json", "--output", outfile, "stream", infile],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert result.exit_code == 0, f"Output: {result.output}"
|
||||
|
||||
# excel
|
||||
outfile = os.path.join(tempdir, "health.xlsx")
|
||||
|
|
@ -131,7 +139,7 @@ def test_cli_output_format():
|
|||
cli,
|
||||
["--format", "excel", "--output", outfile, "stream", infile],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert result.exit_code == 0, f"Output: {result.output}"
|
||||
|
||||
# html
|
||||
outfile = os.path.join(tempdir, "health.html")
|
||||
|
|
@ -139,7 +147,15 @@ def test_cli_output_format():
|
|||
cli,
|
||||
["--format", "html", "--output", outfile, "stream", infile],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert result.exit_code == 0, f"Output: {result.output}"
|
||||
|
||||
# markdown
|
||||
outfile = os.path.join(tempdir, "health.md")
|
||||
result = runner.invoke(
|
||||
cli,
|
||||
["--format", "markdown", "--output", outfile, "stream", infile],
|
||||
)
|
||||
assert result.exit_code == 0, f"Output: {result.output}"
|
||||
|
||||
# zip
|
||||
outfile = os.path.join(tempdir, "health.csv")
|
||||
|
|
@ -155,7 +171,7 @@ def test_cli_output_format():
|
|||
infile,
|
||||
],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert result.exit_code == 0, f"Output: {result.output}"
|
||||
|
||||
|
||||
def test_cli_quiet():
|
||||
|
|
|
|||
|
|
@ -1,20 +1,45 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
import pandas as pd
|
||||
from pandas.testing import assert_frame_equal
|
||||
|
||||
import camelot
|
||||
from camelot.io import PDFHandler
|
||||
from camelot.core import Table, TableList
|
||||
from camelot.__version__ import generate_version
|
||||
from camelot.backends import ImageConversionBackend
|
||||
|
||||
from .data import *
|
||||
|
||||
testdir = os.path.dirname(os.path.abspath(__file__))
|
||||
testdir = os.path.join(testdir, "files")
|
||||
|
||||
skip_on_windows = pytest.mark.skipif(
|
||||
sys.platform.startswith("win"),
|
||||
reason="Ghostscript not installed in Windows test environment",
|
||||
)
|
||||
|
||||
|
||||
def test_version_generation():
|
||||
version = (0, 7, 3)
|
||||
assert generate_version(version, prerelease=None, revision=None) == "0.7.3"
|
||||
|
||||
|
||||
def test_version_generation_with_prerelease_revision():
|
||||
version = (0, 7, 3)
|
||||
prerelease = "alpha"
|
||||
revision = 2
|
||||
assert (
|
||||
generate_version(version, prerelease=prerelease, revision=revision)
|
||||
== "0.7.3-alpha.2"
|
||||
)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_parsing_report():
|
||||
parsing_report = {"accuracy": 99.02, "whitespace": 12.24, "order": 1, "page": 1}
|
||||
|
||||
|
|
@ -34,246 +59,75 @@ def test_password():
|
|||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream():
|
||||
df = pd.DataFrame(data_stream)
|
||||
|
||||
filename = os.path.join(testdir, "health.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_table_rotated():
|
||||
df = pd.DataFrame(data_stream_table_rotated)
|
||||
|
||||
filename = os.path.join(testdir, "clockwise_table_2.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
filename = os.path.join(testdir, "anticlockwise_table_2.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_two_tables():
|
||||
df1 = pd.DataFrame(data_stream_two_tables_1)
|
||||
df2 = pd.DataFrame(data_stream_two_tables_2)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/12s0324.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
|
||||
assert len(tables) == 2
|
||||
assert df1.equals(tables[0].df)
|
||||
assert df2.equals(tables[1].df)
|
||||
|
||||
|
||||
def test_stream_table_regions():
|
||||
df = pd.DataFrame(data_stream_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/us-007.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", table_regions=["320,460,573,335"]
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_table_areas():
|
||||
df = pd.DataFrame(data_stream_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/us-007.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", table_areas=["320,500,573,335"]
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_columns():
|
||||
df = pd.DataFrame(data_stream_columns)
|
||||
|
||||
filename = os.path.join(testdir, "mexican_towns.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", columns=["67,180,230,425,475"], row_tol=10
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_split_text():
|
||||
df = pd.DataFrame(data_stream_split_text)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/m27.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename,
|
||||
flavor="stream",
|
||||
columns=["72,95,209,327,442,529,566,606,683"],
|
||||
split_text=True,
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_flag_size():
|
||||
df = pd.DataFrame(data_stream_flag_size)
|
||||
|
||||
filename = os.path.join(testdir, "superscript.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", flag_size=True)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_strip_text():
|
||||
df = pd.DataFrame(data_stream_strip_text)
|
||||
|
||||
filename = os.path.join(testdir, "detect_vertical_false.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", strip_text=" ,\n")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_edge_tol():
|
||||
df = pd.DataFrame(data_stream_edge_tol)
|
||||
|
||||
filename = os.path.join(testdir, "edge_tol.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", edge_tol=500)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_layout_kwargs():
|
||||
df = pd.DataFrame(data_stream_layout_kwargs)
|
||||
|
||||
filename = os.path.join(testdir, "detect_vertical_false.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", layout_kwargs={"detect_vertical": False}
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice():
|
||||
df = pd.DataFrame(data_lattice)
|
||||
|
||||
filename = os.path.join(
|
||||
testdir, "tabula/icdar2013-dataset/competition-dataset-us/us-030.pdf"
|
||||
)
|
||||
tables = camelot.read_pdf(filename, pages="2")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice_table_rotated():
|
||||
df = pd.DataFrame(data_lattice_table_rotated)
|
||||
|
||||
filename = os.path.join(testdir, "clockwise_table_1.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
filename = os.path.join(testdir, "anticlockwise_table_1.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice_two_tables():
|
||||
df1 = pd.DataFrame(data_lattice_two_tables_1)
|
||||
df2 = pd.DataFrame(data_lattice_two_tables_2)
|
||||
|
||||
filename = os.path.join(testdir, "twotables_2.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert len(tables) == 2
|
||||
assert df1.equals(tables[0].df)
|
||||
assert df2.equals(tables[1].df)
|
||||
|
||||
|
||||
def test_lattice_table_regions():
|
||||
df = pd.DataFrame(data_lattice_table_regions)
|
||||
|
||||
filename = os.path.join(testdir, "table_region.pdf")
|
||||
tables = camelot.read_pdf(filename, table_regions=["170,370,560,270"])
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice_table_areas():
|
||||
df = pd.DataFrame(data_lattice_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "twotables_2.pdf")
|
||||
tables = camelot.read_pdf(filename, table_areas=["80,693,535,448"])
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice_process_background():
|
||||
df = pd.DataFrame(data_lattice_process_background)
|
||||
|
||||
filename = os.path.join(testdir, "background_lines_1.pdf")
|
||||
tables = camelot.read_pdf(filename, process_background=True)
|
||||
assert_frame_equal(df, tables[1].df)
|
||||
|
||||
|
||||
def test_lattice_copy_text():
|
||||
df = pd.DataFrame(data_lattice_copy_text)
|
||||
|
||||
filename = os.path.join(testdir, "row_span_1.pdf")
|
||||
tables = camelot.read_pdf(filename, line_scale=60, copy_text="v")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_lattice_shift_text():
|
||||
df_lt = pd.DataFrame(data_lattice_shift_text_left_top)
|
||||
df_disable = pd.DataFrame(data_lattice_shift_text_disable)
|
||||
df_rb = pd.DataFrame(data_lattice_shift_text_right_bottom)
|
||||
|
||||
filename = os.path.join(testdir, "column_span_2.pdf")
|
||||
tables = camelot.read_pdf(filename, line_scale=40)
|
||||
assert df_lt.equals(tables[0].df)
|
||||
|
||||
tables = camelot.read_pdf(filename, line_scale=40, shift_text=[""])
|
||||
assert df_disable.equals(tables[0].df)
|
||||
|
||||
tables = camelot.read_pdf(filename, line_scale=40, shift_text=["r", "b"])
|
||||
assert df_rb.equals(tables[0].df)
|
||||
|
||||
|
||||
def test_repr():
|
||||
def test_repr_poppler():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
tables = camelot.read_pdf(filename, backend="poppler")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert (
|
||||
repr(tables[0].cells[0][0]) == "<Cell x1=120.48 y1=218.43 x2=164.64 y2=233.77>"
|
||||
)
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=219 x2=165 y2=234>"
|
||||
|
||||
|
||||
def test_pages():
|
||||
@skip_on_windows
|
||||
def test_repr_ghostscript():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="ghostscript")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=218 x2=165 y2=234>"
|
||||
|
||||
|
||||
def test_url_poppler():
|
||||
url = "https://camelot-py.readthedocs.io/en/master/_static/pdf/foo.pdf"
|
||||
tables = camelot.read_pdf(url)
|
||||
tables = camelot.read_pdf(url, backend="poppler")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert (
|
||||
repr(tables[0].cells[0][0]) == "<Cell x1=120.48 y1=218.43 x2=164.64 y2=233.77>"
|
||||
)
|
||||
|
||||
tables = camelot.read_pdf(url, pages="1-end")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert (
|
||||
repr(tables[0].cells[0][0]) == "<Cell x1=120.48 y1=218.43 x2=164.64 y2=233.77>"
|
||||
)
|
||||
|
||||
tables = camelot.read_pdf(url, pages="all")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert (
|
||||
repr(tables[0].cells[0][0]) == "<Cell x1=120.48 y1=218.43 x2=164.64 y2=233.77>"
|
||||
)
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=219 x2=165 y2=234>"
|
||||
|
||||
|
||||
def test_url():
|
||||
@skip_on_windows
|
||||
def test_url_ghostscript():
|
||||
url = "https://camelot-py.readthedocs.io/en/master/_static/pdf/foo.pdf"
|
||||
tables = camelot.read_pdf(url)
|
||||
tables = camelot.read_pdf(url, backend="ghostscript")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert (
|
||||
repr(tables[0].cells[0][0]) == "<Cell x1=120.48 y1=218.43 x2=164.64 y2=233.77>"
|
||||
)
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=218 x2=165 y2=234>"
|
||||
|
||||
|
||||
def test_arabic():
|
||||
df = pd.DataFrame(data_arabic)
|
||||
def test_pages_poppler():
|
||||
url = "https://camelot-py.readthedocs.io/en/master/_static/pdf/foo.pdf"
|
||||
tables = camelot.read_pdf(url, backend="poppler")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=219 x2=165 y2=234>"
|
||||
|
||||
filename = os.path.join(testdir, "tabula/arabic.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
tables = camelot.read_pdf(url, pages="1-end", backend="poppler")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=219 x2=165 y2=234>"
|
||||
|
||||
tables = camelot.read_pdf(url, pages="all", backend="poppler")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=219 x2=165 y2=234>"
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_pages_ghostscript():
|
||||
url = "https://camelot-py.readthedocs.io/en/master/_static/pdf/foo.pdf"
|
||||
tables = camelot.read_pdf(url, backend="ghostscript")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=218 x2=165 y2=234>"
|
||||
|
||||
tables = camelot.read_pdf(url, pages="1-end", backend="ghostscript")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=218 x2=165 y2=234>"
|
||||
|
||||
tables = camelot.read_pdf(url, pages="all", backend="ghostscript")
|
||||
assert repr(tables) == "<TableList n=1>"
|
||||
assert repr(tables[0]) == "<Table shape=(7, 7)>"
|
||||
assert repr(tables[0].cells[0][0]) == "<Cell x1=120 y1=218 x2=165 y2=234>"
|
||||
|
||||
|
||||
def test_table_order():
|
||||
|
|
@ -301,24 +155,20 @@ def test_table_order():
|
|||
]
|
||||
|
||||
|
||||
def test_version_generation():
|
||||
version = (0, 7, 3)
|
||||
assert generate_version(version, prerelease=None, revision=None) == "0.7.3"
|
||||
def test_handler_pages_generator():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
|
||||
handler = PDFHandler(filename)
|
||||
assert handler._get_pages("1") == [1]
|
||||
|
||||
def test_version_generation_with_prerelease_revision():
|
||||
version = (0, 7, 3)
|
||||
prerelease = "alpha"
|
||||
revision = 2
|
||||
assert (
|
||||
generate_version(version, prerelease=prerelease, revision=revision)
|
||||
== "0.7.3-alpha.2"
|
||||
)
|
||||
handler = PDFHandler(filename)
|
||||
assert handler._get_pages("all") == [1]
|
||||
|
||||
handler = PDFHandler(filename)
|
||||
assert handler._get_pages("1-end") == [1]
|
||||
|
||||
def test_stream_duplicated_text():
|
||||
df = pd.DataFrame(data_stream_duplicated_text)
|
||||
handler = PDFHandler(filename)
|
||||
assert handler._get_pages("1,2,3,4") == [1, 2, 3, 4]
|
||||
|
||||
filename = os.path.join(testdir, "birdisland.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
handler = PDFHandler(filename)
|
||||
assert handler._get_pages("1,2,5-10") == [1, 2, 5, 6, 7, 8, 9, 10]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import pytest
|
||||
|
|
@ -12,6 +13,11 @@ testdir = os.path.dirname(os.path.abspath(__file__))
|
|||
testdir = os.path.join(testdir, "files")
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
|
||||
skip_on_windows = pytest.mark.skipif(
|
||||
sys.platform.startswith("win"),
|
||||
reason="Ghostscript not installed in Windows test environment",
|
||||
)
|
||||
|
||||
|
||||
def test_unknown_flavor():
|
||||
message = "Unknown flavor specified." " Use either 'lattice' or 'stream'"
|
||||
|
|
@ -32,56 +38,7 @@ def test_unsupported_format():
|
|||
tables = camelot.read_pdf(filename)
|
||||
|
||||
|
||||
def test_stream_equal_length():
|
||||
message = "Length of table_areas and columns" " should be equal"
|
||||
with pytest.raises(ValueError, match=message):
|
||||
tables = camelot.read_pdf(
|
||||
filename,
|
||||
flavor="stream",
|
||||
table_areas=["10,20,30,40"],
|
||||
columns=["10,20,30,40", "10,20,30,40"],
|
||||
)
|
||||
|
||||
|
||||
def test_image_warning():
|
||||
filename = os.path.join(testdir, "image.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert (
|
||||
str(e.value)
|
||||
== "page-1 is image-based, camelot only works on text-based pages."
|
||||
)
|
||||
|
||||
|
||||
def test_lattice_no_tables_on_page():
|
||||
filename = os.path.join(testdir, "empty.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="lattice")
|
||||
assert str(e.value) == "No tables found on page-1"
|
||||
|
||||
|
||||
def test_stream_no_tables_on_page():
|
||||
filename = os.path.join(testdir, "empty.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert str(e.value) == "No tables found on page-1"
|
||||
|
||||
|
||||
def test_stream_no_tables_in_area():
|
||||
filename = os.path.join(testdir, "only_page_number.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert str(e.value) == "No tables found in table area 1"
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_no_tables_found_logs_suppressed():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
with warnings.catch_warnings():
|
||||
|
|
@ -118,3 +75,81 @@ def test_bad_password():
|
|||
message = "file has not been decrypted"
|
||||
with pytest.raises(Exception, match=message):
|
||||
tables = camelot.read_pdf(filename, password="wrongpass")
|
||||
|
||||
|
||||
def test_stream_equal_length():
|
||||
message = "Length of table_areas and columns" " should be equal"
|
||||
with pytest.raises(ValueError, match=message):
|
||||
tables = camelot.read_pdf(
|
||||
filename,
|
||||
flavor="stream",
|
||||
table_areas=["10,20,30,40"],
|
||||
columns=["10,20,30,40", "10,20,30,40"],
|
||||
)
|
||||
|
||||
|
||||
def test_image_warning():
|
||||
filename = os.path.join(testdir, "image.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error", category=UserWarning)
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert (
|
||||
str(e.value)
|
||||
== "page-1 is image-based, camelot only works on text-based pages."
|
||||
)
|
||||
|
||||
|
||||
def test_stream_no_tables_on_page():
|
||||
filename = os.path.join(testdir, "empty.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert str(e.value) == "No tables found on page-1"
|
||||
|
||||
|
||||
def test_stream_no_tables_in_area():
|
||||
filename = os.path.join(testdir, "only_page_number.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert str(e.value) == "No tables found in table area 1"
|
||||
|
||||
|
||||
def test_lattice_no_tables_on_page():
|
||||
filename = os.path.join(testdir, "empty.pdf")
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error", category=UserWarning)
|
||||
with pytest.raises(UserWarning) as e:
|
||||
tables = camelot.read_pdf(filename, flavor="lattice")
|
||||
assert str(e.value) == "No tables found on page-1"
|
||||
|
||||
|
||||
def test_lattice_unknown_backend():
|
||||
message = "Unknown backend 'mupdf' specified. Please use either 'poppler' or 'ghostscript'."
|
||||
with pytest.raises(NotImplementedError, match=message):
|
||||
tables = camelot.read_pdf(filename, backend="mupdf")
|
||||
|
||||
|
||||
def test_lattice_no_convert_method():
|
||||
class ConversionBackend(object):
|
||||
pass
|
||||
|
||||
message = "must implement a 'convert' method"
|
||||
with pytest.raises(NotImplementedError, match=message):
|
||||
tables = camelot.read_pdf(filename, backend=ConversionBackend())
|
||||
|
||||
|
||||
def test_lattice_ghostscript_deprecation_warning():
|
||||
ghostscript_deprecation_warning = (
|
||||
"'ghostscript' will be replaced by 'poppler' as the default image conversion"
|
||||
" backend in v0.12.0. You can try out 'poppler' with backend='poppler'."
|
||||
)
|
||||
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error")
|
||||
with pytest.raises(DeprecationWarning) as e:
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert str(e.value) == ghostscript_deprecation_warning
|
||||
|
|
|
|||
|
|
@ -0,0 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pytest
|
||||
|
||||
import camelot.backends.image_conversion
|
||||
from camelot.backends import ImageConversionBackend
|
||||
|
||||
|
||||
class PopplerBackendError(object):
|
||||
def convert(self, pdf_path, png_path):
|
||||
raise ValueError("Image conversion failed")
|
||||
|
||||
|
||||
class GhostscriptBackendError(object):
|
||||
def convert(self, pdf_path, png_path):
|
||||
raise ValueError("Image conversion failed")
|
||||
|
||||
|
||||
class GhostscriptBackendNoError(object):
|
||||
def convert(self, pdf_path, png_path):
|
||||
pass
|
||||
|
||||
|
||||
def test_poppler_backend_error_when_no_use_fallback(monkeypatch):
|
||||
BACKENDS = {
|
||||
"poppler": PopplerBackendError,
|
||||
"ghostscript": GhostscriptBackendNoError,
|
||||
}
|
||||
monkeypatch.setattr(
|
||||
"camelot.backends.image_conversion.BACKENDS", BACKENDS, raising=True
|
||||
)
|
||||
backend = ImageConversionBackend(use_fallback=False)
|
||||
|
||||
message = "Image conversion failed with image conversion backend 'poppler'"
|
||||
with pytest.raises(ValueError, match=message):
|
||||
backend.convert("foo", "bar")
|
||||
|
||||
|
||||
def test_ghostscript_backend_when_use_fallback(monkeypatch):
|
||||
BACKENDS = {
|
||||
"poppler": PopplerBackendError,
|
||||
"ghostscript": GhostscriptBackendNoError,
|
||||
}
|
||||
monkeypatch.setattr(
|
||||
"camelot.backends.image_conversion.BACKENDS", BACKENDS, raising=True
|
||||
)
|
||||
backend = ImageConversionBackend()
|
||||
backend.convert("foo", "bar")
|
||||
|
||||
|
||||
def test_ghostscript_backend_error_when_use_fallback(monkeypatch):
|
||||
BACKENDS = {"poppler": PopplerBackendError, "ghostscript": GhostscriptBackendError}
|
||||
monkeypatch.setattr(
|
||||
"camelot.backends.image_conversion.BACKENDS", BACKENDS, raising=True
|
||||
)
|
||||
backend = ImageConversionBackend()
|
||||
|
||||
message = "Image conversion failed with image conversion backend 'ghostscript'"
|
||||
with pytest.raises(ValueError, match=message):
|
||||
backend.convert("foo", "bar")
|
||||
|
|
@ -0,0 +1,120 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
import pandas as pd
|
||||
from pandas.testing import assert_frame_equal
|
||||
|
||||
import camelot
|
||||
from camelot.core import Table, TableList
|
||||
from camelot.__version__ import generate_version
|
||||
|
||||
from .data import *
|
||||
|
||||
testdir = os.path.dirname(os.path.abspath(__file__))
|
||||
testdir = os.path.join(testdir, "files")
|
||||
|
||||
skip_on_windows = pytest.mark.skipif(
|
||||
sys.platform.startswith("win"),
|
||||
reason="Ghostscript not installed in Windows test environment",
|
||||
)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice():
|
||||
df = pd.DataFrame(data_lattice)
|
||||
|
||||
filename = os.path.join(
|
||||
testdir, "tabula/icdar2013-dataset/competition-dataset-us/us-030.pdf"
|
||||
)
|
||||
tables = camelot.read_pdf(filename, pages="2")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_table_rotated():
|
||||
df = pd.DataFrame(data_lattice_table_rotated)
|
||||
|
||||
filename = os.path.join(testdir, "clockwise_table_1.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
filename = os.path.join(testdir, "anticlockwise_table_1.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_two_tables():
|
||||
df1 = pd.DataFrame(data_lattice_two_tables_1)
|
||||
df2 = pd.DataFrame(data_lattice_two_tables_2)
|
||||
|
||||
filename = os.path.join(testdir, "twotables_2.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert len(tables) == 2
|
||||
assert df1.equals(tables[0].df)
|
||||
assert df2.equals(tables[1].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_table_regions():
|
||||
df = pd.DataFrame(data_lattice_table_regions)
|
||||
|
||||
filename = os.path.join(testdir, "table_region.pdf")
|
||||
tables = camelot.read_pdf(filename, table_regions=["170,370,560,270"])
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_table_areas():
|
||||
df = pd.DataFrame(data_lattice_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "twotables_2.pdf")
|
||||
tables = camelot.read_pdf(filename, table_areas=["80,693,535,448"])
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_process_background():
|
||||
df = pd.DataFrame(data_lattice_process_background)
|
||||
|
||||
filename = os.path.join(testdir, "background_lines_1.pdf")
|
||||
tables = camelot.read_pdf(filename, process_background=True)
|
||||
assert_frame_equal(df, tables[1].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_copy_text():
|
||||
df = pd.DataFrame(data_lattice_copy_text)
|
||||
|
||||
filename = os.path.join(testdir, "row_span_1.pdf")
|
||||
tables = camelot.read_pdf(filename, line_scale=60, copy_text="v")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_shift_text():
|
||||
df_lt = pd.DataFrame(data_lattice_shift_text_left_top)
|
||||
df_disable = pd.DataFrame(data_lattice_shift_text_disable)
|
||||
df_rb = pd.DataFrame(data_lattice_shift_text_right_bottom)
|
||||
|
||||
filename = os.path.join(testdir, "column_span_2.pdf")
|
||||
tables = camelot.read_pdf(filename, line_scale=40)
|
||||
assert df_lt.equals(tables[0].df)
|
||||
|
||||
tables = camelot.read_pdf(filename, line_scale=40, shift_text=[""])
|
||||
assert df_disable.equals(tables[0].df)
|
||||
|
||||
tables = camelot.read_pdf(filename, line_scale=40, shift_text=["r", "b"])
|
||||
assert df_rb.equals(tables[0].df)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
def test_lattice_arabic():
|
||||
df = pd.DataFrame(data_arabic)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/arabic.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
|
@ -1,16 +1,22 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import camelot
|
||||
|
||||
|
||||
testdir = os.path.dirname(os.path.abspath(__file__))
|
||||
testdir = os.path.join(testdir, "files")
|
||||
|
||||
skip_on_windows = pytest.mark.skipif(
|
||||
sys.platform.startswith("win"),
|
||||
reason="Ghostscript not installed in Windows test environment",
|
||||
)
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_text_plot():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
|
|
@ -19,16 +25,24 @@ def test_text_plot():
|
|||
|
||||
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_grid_plot():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
return camelot.plot(tables[0], kind="grid")
|
||||
def test_textedge_plot():
|
||||
filename = os.path.join(testdir, "tabula/12s0324.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
return camelot.plot(tables[0], kind="textedge")
|
||||
|
||||
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_lattice_contour_plot():
|
||||
def test_lattice_contour_plot_poppler():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
tables = camelot.read_pdf(filename, backend="poppler")
|
||||
return camelot.plot(tables[0], kind="contour")
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_lattice_contour_plot_ghostscript():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="ghostscript")
|
||||
return camelot.plot(tables[0], kind="contour")
|
||||
|
||||
|
||||
|
|
@ -40,21 +54,45 @@ def test_stream_contour_plot():
|
|||
|
||||
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_line_plot():
|
||||
def test_line_plot_poppler():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
tables = camelot.read_pdf(filename, backend="poppler")
|
||||
return camelot.plot(tables[0], kind="line")
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_line_plot_ghostscript():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="ghostscript")
|
||||
return camelot.plot(tables[0], kind="line")
|
||||
|
||||
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_joint_plot():
|
||||
def test_joint_plot_poppler():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename)
|
||||
tables = camelot.read_pdf(filename, backend="poppler")
|
||||
return camelot.plot(tables[0], kind="joint")
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_joint_plot_ghostscript():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="ghostscript")
|
||||
return camelot.plot(tables[0], kind="joint")
|
||||
|
||||
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_textedge_plot():
|
||||
filename = os.path.join(testdir, "tabula/12s0324.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
return camelot.plot(tables[0], kind="textedge")
|
||||
def test_grid_plot_poppler():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="poppler")
|
||||
return camelot.plot(tables[0], kind="grid")
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.mpl_image_compare(baseline_dir="files/baseline_plots", remove_text=True)
|
||||
def test_grid_plot_ghostscript():
|
||||
filename = os.path.join(testdir, "foo.pdf")
|
||||
tables = camelot.read_pdf(filename, backend="ghostscript")
|
||||
return camelot.plot(tables[0], kind="grid")
|
||||
|
|
|
|||
|
|
@ -0,0 +1,133 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
import pandas as pd
|
||||
from pandas.testing import assert_frame_equal
|
||||
|
||||
import camelot
|
||||
from camelot.core import Table, TableList
|
||||
from camelot.__version__ import generate_version
|
||||
|
||||
from .data import *
|
||||
|
||||
testdir = os.path.dirname(os.path.abspath(__file__))
|
||||
testdir = os.path.join(testdir, "files")
|
||||
|
||||
|
||||
def test_stream():
|
||||
df = pd.DataFrame(data_stream)
|
||||
|
||||
filename = os.path.join(testdir, "health.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_table_rotated():
|
||||
df = pd.DataFrame(data_stream_table_rotated)
|
||||
|
||||
filename = os.path.join(testdir, "clockwise_table_2.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
filename = os.path.join(testdir, "anticlockwise_table_2.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_two_tables():
|
||||
df1 = pd.DataFrame(data_stream_two_tables_1)
|
||||
df2 = pd.DataFrame(data_stream_two_tables_2)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/12s0324.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
|
||||
assert len(tables) == 2
|
||||
assert df1.equals(tables[0].df)
|
||||
assert df2.equals(tables[1].df)
|
||||
|
||||
|
||||
def test_stream_table_regions():
|
||||
df = pd.DataFrame(data_stream_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/us-007.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", table_regions=["320,460,573,335"]
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_table_areas():
|
||||
df = pd.DataFrame(data_stream_table_areas)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/us-007.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", table_areas=["320,500,573,335"]
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_columns():
|
||||
df = pd.DataFrame(data_stream_columns)
|
||||
|
||||
filename = os.path.join(testdir, "mexican_towns.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", columns=["67,180,230,425,475"], row_tol=10
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_split_text():
|
||||
df = pd.DataFrame(data_stream_split_text)
|
||||
|
||||
filename = os.path.join(testdir, "tabula/m27.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename,
|
||||
flavor="stream",
|
||||
columns=["72,95,209,327,442,529,566,606,683"],
|
||||
split_text=True,
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_flag_size():
|
||||
df = pd.DataFrame(data_stream_flag_size)
|
||||
|
||||
filename = os.path.join(testdir, "superscript.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", flag_size=True)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_strip_text():
|
||||
df = pd.DataFrame(data_stream_strip_text)
|
||||
|
||||
filename = os.path.join(testdir, "detect_vertical_false.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", strip_text=" ,\n")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_edge_tol():
|
||||
df = pd.DataFrame(data_stream_edge_tol)
|
||||
|
||||
filename = os.path.join(testdir, "edge_tol.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream", edge_tol=500)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_layout_kwargs():
|
||||
df = pd.DataFrame(data_stream_layout_kwargs)
|
||||
|
||||
filename = os.path.join(testdir, "detect_vertical_false.pdf")
|
||||
tables = camelot.read_pdf(
|
||||
filename, flavor="stream", layout_kwargs={"detect_vertical": False}
|
||||
)
|
||||
assert_frame_equal(df, tables[0].df)
|
||||
|
||||
|
||||
def test_stream_duplicated_text():
|
||||
df = pd.DataFrame(data_stream_duplicated_text)
|
||||
|
||||
filename = os.path.join(testdir, "birdisland.pdf")
|
||||
tables = camelot.read_pdf(filename, flavor="stream")
|
||||
assert_frame_equal(df, tables[0].df)
|
||||