mirror of https://github.com/InternLM/InternLM

			Update deployment example by changing list of string to string
parent 8194c348c1
commit 2fceba86f9

@@ -207,7 +207,7 @@ With only 4 lines of codes, you can perform `internlm2-chat-7b` inference after
 ```python
 from lmdeploy import pipeline
 pipe = pipeline("internlm/internlm2-chat-7b")
-response = pipe(["Hi, pls intro yourself", "Shanghai is"])
+response = pipe("Hi, pls intro yourself")
 print(response)
 ```

@@ -203,7 +203,7 @@ streamlit run ./chat/web_demo.py
 ```python
 from lmdeploy import pipeline
 pipe = pipeline("internlm/internlm2-chat-7b")
-response = pipe(["Hi, pls intro yourself", "Shanghai is"])
+response = pipe("Hi, pls intro yourself")
 print(response)
 ```

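The `pipeline` call in this example accepts either a single prompt string (the form the commit switches to) or a list of prompt strings for batch inference (the form it replaces). A minimal sketch of both call styles, assuming `lmdeploy` is installed and the `internlm/internlm2-chat-7b` weights can be fetched:

```python
# Minimal sketch, assuming `pip install lmdeploy` and access to the
# internlm/internlm2-chat-7b weights.
from lmdeploy import pipeline

pipe = pipeline("internlm/internlm2-chat-7b")

# Single prompt string: the form used by the updated example.
response = pipe("Hi, pls intro yourself")
print(response)

# List of prompt strings: the form the commit replaces. This runs batch
# inference and returns one response per prompt.
responses = pipe(["Hi, pls intro yourself", "Shanghai is"])
for r in responses:
    print(r)
```

The single-string form keeps the quick start at the four lines the surrounding README text promises, and prints one response instead of a batched list.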