mirror of https://github.com/InternLM/InternLM
Update web_demo.py
parent ad4a918402
commit 33b87bcba9
web_demo.py

@@ -8,7 +8,6 @@ Please refer to these links below for more information:
 Please run with the command `streamlit run path/to/web_demo.py --server.address=0.0.0.0 --server.port 7860`.
 Using `python path/to/web_demo.py` may cause unknown problems.
 """
-
 import copy
 import warnings
 from dataclasses import asdict, dataclass
@@ -17,11 +16,13 @@ from typing import Callable, List, Optional
 import streamlit as st
 import torch
 from torch import nn
-from transformers import AutoModelForCausalLM, AutoTokenizer
 from transformers.generation.utils import (LogitsProcessorList,
                                            StoppingCriteriaList)
 from transformers.utils import logging
 
+from transformers import AutoTokenizer, AutoModelForCausalLM  # isort: skip
+
+
 logger = logging.get_logger(__name__)
 
 
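The `# isort: skip` comment on the moved import tells isort to leave that line in place instead of re-sorting it back into alphabetical order among the other `transformers` imports. As a rough, self-contained sketch of how these imports are typically consumed when the demo loads the model, something like the following applies; the checkpoint name "internlm/internlm-chat-7b", the `trust_remote_code=True` flag, and the half-precision cast are assumptions about the surrounding script, not part of this diff.

# Sketch only: the model path, dtype, and trust_remote_code flag are assumed,
# not taken from this commit.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer  # isort: skip

MODEL_PATH = "internlm/internlm-chat-7b"  # hypothetical checkpoint name

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    torch_dtype=torch.float16,  # half precision to fit on a single GPU
    trust_remote_code=True,
).cuda().eval()

Launching the script with `streamlit run` rather than plain `python`, as the docstring warns, matters because Streamlit's script runner provides the session state and rerun loop that the chat UI depends on.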