Commit 51c7c4a (1 parent: d7f1567)
Showing 13 changed files with 126 additions and 81 deletions.
This file was deleted (contents not shown).
@@ -0,0 +1,20 @@
from typing import Dict

import torch

from surya.common.predictor import BasePredictor
from surya.detection import DetectionPredictor
from surya.layout import LayoutPredictor
from surya.ocr_error import OCRErrorPredictor
from surya.recognition import RecognitionPredictor


def load_predictors(
    device: str | torch.device | None = None,
    dtype: torch.dtype | str | None = None) -> Dict[str, BasePredictor]:
    return {
        "layout": LayoutPredictor(device=device, dtype=dtype),
        "ocr_error": OCRErrorPredictor(device=device, dtype=dtype),
        "recognition": RecognitionPredictor(device=device, dtype=dtype),
        "detection": DetectionPredictor(device=device, dtype=dtype),
    }
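As a hedged usage sketch of the new helper (the new file's module path is not visible in this rendered diff, so the import of load_predictors is left out; device and dtype follow the signature above):

import torch

# Hypothetical usage; load_predictors is imported from wherever the new module lives.
predictors = load_predictors(
    device="cuda" if torch.cuda.is_available() else "cpu",
    dtype=torch.float16,
)

# The returned dict maps task names to BasePredictor instances.
ocr_error_predictor = predictors["ocr_error"]
layout_predictor = predictors["layout"]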
This file was deleted (contents not shown).
@@ -0,0 +1,60 @@
import math
from typing import List, Optional

import numpy as np
import torch
from tqdm import tqdm

from surya.common.predictor import BasePredictor
from surya.ocr_error.loader import OCRErrorLoader
from surya.ocr_error.model.config import ID2LABEL
from surya.schema import OCRErrorDetectionResult
from surya.settings import settings


class OCRErrorPredictor(BasePredictor):
    model_loader_cls = OCRErrorLoader

    def __call__(
        self,
        texts: List[str],
        batch_size: Optional[int] = None
    ):
        return self.batch_ocr_error_detection(texts, batch_size)

    @staticmethod
    def get_batch_size():
        batch_size = settings.OCR_ERROR_BATCH_SIZE
        if batch_size is None:
            batch_size = 8
            if settings.TORCH_DEVICE_MODEL == "mps":
                batch_size = 8
            if settings.TORCH_DEVICE_MODEL == "cuda":
                batch_size = 64
        return batch_size

    def batch_ocr_error_detection(
        self,
        texts: List[str],
        batch_size: Optional[int] = None
    ):
        if batch_size is None:
            batch_size = self.get_batch_size()

        num_batches = math.ceil(len(texts) / batch_size)
        texts_processed = self.processor(texts, padding='longest', truncation=True, return_tensors='pt')
        predictions = []
        for batch_idx in tqdm(range(num_batches)):
            start_idx, end_idx = batch_idx * batch_size, (batch_idx + 1) * batch_size
            batch_input_ids = texts_processed.input_ids[start_idx:end_idx].to(self.model.device)
            batch_attention_mask = texts_processed.attention_mask[start_idx:end_idx].to(self.model.device)

            with torch.inference_mode():
                pred = self.model(batch_input_ids, attention_mask=batch_attention_mask)
                logits = pred.logits.detach().cpu().numpy().astype(np.float32)
                predictions.extend(np.argmax(logits, axis=1).tolist())

        return OCRErrorDetectionResult(
            texts=texts,
            labels=[ID2LABEL[p] for p in predictions]
        )
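Building on the predictor, a hedged usage sketch based on its __call__ signature and the OCRErrorDetectionResult it returns (the sample texts and the dict lookup via load_predictors are illustrative; the actual label strings in ID2LABEL are not shown in this diff):

predictor = predictors["ocr_error"]  # from the load_predictors sketch above, or construct directly

texts = ["Th1s l00ks l1ke n0isy OCR 0utput.", "This sentence reads cleanly."]
result = predictor(texts, batch_size=8)

# OCRErrorDetectionResult pairs each input text with one predicted label.
for text, label in zip(result.texts, result.labels):
    print(f"{label}: {text}")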
@@ -0,0 +1,41 @@
from typing import Optional

import torch

from surya.common.load import ModelLoader
from surya.ocr_error.model.config import DistilBertConfig
from surya.ocr_error.model.encoder import DistilBertForSequenceClassification
from surya.ocr_error.tokenizer import DistilBertTokenizer
from surya.settings import settings


class OCRErrorLoader(ModelLoader):
    def __init__(self, checkpoint: Optional[str] = None):
        super().__init__(checkpoint)

        if self.checkpoint is None:
            self.checkpoint = settings.OCR_ERROR_MODEL_CHECKPOINT

    def model(
        self,
        device=settings.TORCH_DEVICE_MODEL,
        dtype=settings.MODEL_DTYPE
    ) -> DistilBertForSequenceClassification:
        config = DistilBertConfig.from_pretrained(self.checkpoint)
        model = DistilBertForSequenceClassification.from_pretrained(self.checkpoint, torch_dtype=dtype, config=config).to(
            device).eval()

        if settings.OCR_ERROR_STATIC_CACHE:
            torch.set_float32_matmul_precision('high')
            torch._dynamo.config.cache_size_limit = 1
            torch._dynamo.config.suppress_errors = False

            print(f"Compiling detection model {self.checkpoint} on device {device} with dtype {dtype}")
            model = torch.compile(model)

        return model

    def processor(
        self
    ) -> DistilBertTokenizer:
        return DistilBertTokenizer.from_pretrained(self.checkpoint)
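The loader is wired into the predictor through model_loader_cls above; it can also be exercised on its own, roughly as follows (checkpoint fallback and return types are taken from the class, while the tokenizer call mirrors the processor usage in OCRErrorPredictor):

loader = OCRErrorLoader()        # falls back to settings.OCR_ERROR_MODEL_CHECKPOINT
model = loader.model()           # DistilBertForSequenceClassification, moved to the configured device
tokenizer = loader.processor()   # DistilBertTokenizer for the same checkpoint

inputs = tokenizer(["sample text"], padding="longest", truncation=True, return_tensors="pt")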
Empty file.
File renamed without changes.
File renamed without changes.
Empty file.
Empty file.
Empty file.