Skip to content

Commit

Permalink
Rename sample.py to samplers.py
Browse files · Browse the repository at this point in the history
  • Loading branch information
rlouf committed Nov 16, 2023
1 parent e6463ed commit 2b26310
Show file tree
Hide file tree
Showing 9 changed files with 15 additions and 10 deletions.
1 change: 0 additions & 1 deletion docs/api/sample.md

This file was deleted.

1 change: 1 addition & 0 deletions docs/api/samplers.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
::: outlines.text.generate.samplers
2 changes: 1 addition & 1 deletion mkdocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -112,5 +112,5 @@ nav:
- api/fsm.md
- api/parsing.md
- api/regex.md
- api/sample.md
- api/samplers.md
- api/continuation.md
2 changes: 1 addition & 1 deletion outlines/text/generate/continuation.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from outlines.text.generate.sequence import Sequence

if TYPE_CHECKING:
from outlines.text.generate.sample import Sampler
from outlines.text.generate.samplers import Sampler


class Continuation(Sequence):
Expand Down
2 changes: 1 addition & 1 deletion outlines/text/generate/regex.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from outlines.text.types import python_types_to_regex

if TYPE_CHECKING:
from outlines.text.generate.sample import Sampler
from outlines.text.generate.samplers import Sampler


class Regex(Continuation):
Expand Down
File renamed without changes.
4 changes: 2 additions & 2 deletions outlines/text/generate/sequence.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

if TYPE_CHECKING:
from outlines.models.transformers import KVCacheType, Transformers
from outlines.text.generate.sample import Sampler
from outlines.text.generate.samplers import Sampler


class Sequence:
Expand Down Expand Up @@ -45,7 +45,7 @@ def __init__(
model.tokenizer.pad_token_id, device=model.device
)
if sampler is None:
from outlines.text.generate.sample import multinomial
from outlines.text.generate.samplers import multinomial

self.sampler = multinomial
else:
Expand Down
7 changes: 4 additions & 3 deletions outlines/text/generator.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import math
from typing import Generator, List
from typing import TYPE_CHECKING, Generator, List

import torch

from outlines.text.generate.sample import Sampler
if TYPE_CHECKING:
from outlines.text.generate.samplers import Sampler


def process(generator: Generator, index, token_ids: torch.Tensor):
Expand All @@ -28,7 +29,7 @@ def process(generator: Generator, index, token_ids: torch.Tensor):
yield next_token_id, token_ids


def token_generator(model, sampler: Sampler, samples: int, rng: torch.Generator):
def token_generator(model, sampler: "Sampler", samples: int, rng: torch.Generator):
"""Generator that yields a token every time it is called.
This process is designed to be steered by another supervising
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,11 @@

import torch

from outlines.text.generate.sample import greedy, multinomial, vectorized_random_choice
from outlines.text.generate.samplers import (
greedy,
multinomial,
vectorized_random_choice,
)


def test_greedy():
Expand Down

0 comments on commit 2b26310

Please sign in to comment.