Skip to content

Commit

Permalink
Relax dependencies for the library (#304)
Browse files Browse the repository at this point in the history
* Relax dependencies for the library

* Relax dependencies fix bug (#305)

* Wording fixes in prompt_examples.md (#303)

A few typo fixes in prompt_examples.md.

* "Question Answer" should be "Question Answering"
* "Machine Reading" is a bit of an esoteric technical term, and can probably be removed.
* "Japanese-to-Python" is simpler English.

* fix bug when sequence max length is None

* lint

---------

Co-authored-by: zhaochen20 <[email protected]>
Co-authored-by: Graham Neubig <[email protected]>

---------

Co-authored-by: Eren Chenyang Zhao <[email protected]>
Co-authored-by: zhaochen20 <[email protected]>
  • Loading branch information
3 people authored Aug 29, 2023
1 parent e11144e commit 6515ea8
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 23 deletions.
4 changes: 3 additions & 1 deletion prompt2model/model_executor/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,15 @@ def __init__(
self.tokenizer_max_length = tokenizer_max_length
self.sequence_max_length = sequence_max_length
if self.sequence_max_length is None:
max_length = self.model.config.max_length
logger.warning(
(
"The `max_length` in `self.model.generate` will default to "
f"`self.model.config.max_length` ({self.model.config.max_length})"
f"`self.model.config.max_length` ({max_length})"
" if `sequence_max_length` is `None`."
)
)
self.sequence_max_length = max_length
if hasattr(self.model.config, "max_position_embeddings"):
max_embeddings = self.model.config.max_position_embeddings
if sequence_max_length is not None and sequence_max_length > max_embeddings:
Expand Down
44 changes: 22 additions & 22 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,28 +21,28 @@ classifiers = [
"Programming Language :: Python :: 3.11",
]
dependencies = [
"transformers==4.24.0",
"datasets==2.11.0",
"pandas==1.5.3",
"fastapi==0.100.0",
"gradio==3.38.0",
"torch==2.0.0",
"pytest==7.3.1",
"openai==0.27.2",
"sentencepiece==0.1.99",
"bert_score==0.3.13",
"sacrebleu==2.3.1",
"evaluate==0.4.0",
"tevatron==0.1.0",
"faiss-cpu==1.7.4",
"mdtex2html==1.2.0",
"scikit-learn==1.2.2",
"retriv==0.2.1",
"tiktoken==0.4.0",
"aiolimiter==1.1.0",
"pyfiglet==0.8.post1",
"termcolor==2.3.0",
"psutil==5.9.5",
"transformers",
"datasets",
"pandas",
"fastapi",
"gradio",
"torch",
"pytest",
"openai",
"sentencepiece",
"bert_score",
"sacrebleu",
"evaluate",
"tevatron",
"faiss-cpu",
"mdtex2html",
"scikit-learn",
"retriv",
"tiktoken",
"aiolimiter",
"pyfiglet",
"termcolor",
"psutil",
"protobuf==3.20.0",
]

Expand Down

0 comments on commit 6515ea8

Please sign in to comment.