Reference: https://www.cnblogs.com/scarecrow-blog/p/17875127.html
Downloading the model was covered earlier: https://www.cnblogs.com/qcy-blog/p/18165717
You can also go to the official site directly and download every file one by one.
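If you prefer to script the download instead of clicking every file by hand, a minimal sketch using huggingface_hub is shown below (the repo id and local directory are placeholders, not necessarily the exact model used in this post, and a reasonably recent huggingface_hub is assumed for local_dir support):

from huggingface_hub import snapshot_download

# Placeholder repo id; swap in the model repository you actually want
snapshot_download(repo_id="meta-llama/Llama-2-7b-chat-hf", local_dir=r"model\llama_model_4bit")

With the model files in place, the full script is as follows: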
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from langchain import HuggingFacePipeline, LLMChain, PromptTemplate
model_path = r'model\llama_model_4bit'
if torch.cuda.is_available():
    print(torch.cuda.device_count())
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print(device)
else:
    print('No GPU available')
tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
# Choose the loading mode from the directory name suffix: 4-bit, 8-bit, or plain fp16
if model_path.endswith("4bit"):
    model = AutoModelForCausalLM.from_pretrained(
        model_path,
        load_in_4bit=True,
        torch_dtype=torch.float16,
        device_map='auto'
    )
elif model_path.endswith("8bit"):
    model = AutoModelForCausalLM.from_pretrained(
        model_path,
        load_in_8bit=True,
        torch_dtype=torch.float16,
        device_map='auto'
    )
else:
    model = AutoModelForCausalLM.from_pretrained(model_path).half().cuda()
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_length=512,
    top_p=1,
    repetition_penalty=1.15
)
llama_model = HuggingFacePipeline(pipeline=pipe)
template = '''
#context#
You are a helpful, respectful and honest assistant. You are ready to answer human questions and always answer as helpfully as possible, while being safe.
Please ensure that your responses are socially unbiased and positive in nature.
#question#
Human: What is a good name for a company that makes {product}?
'''
prompt = PromptTemplate(
    input_variables=["product"],
    template=template
)
chain = LLMChain(llm=llama_model, prompt=prompt)
print(chain.run("running shoes"))
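Note that LLMChain and chain.run are deprecated in newer LangChain releases. A minimal sketch of the equivalent pipe-style (LCEL) invocation, assuming a LangChain version that supports it (roughly 0.1 and later):

chain = prompt | llama_model
print(chain.invoke({"product": "running shoes"}))

Either way, the chain returns the model's completion as a plain string.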