# Previous implementation: calling the DeepSeek chat completion API directly
# with requests (kept for reference; superseded by the Ark/OpenAI client below).
#
# import requests
# import json
#
#
# def deepseek_generation(prompt, api_key):
#     """
#     Call the DeepSeek model's generation endpoint.
#
#     Parameters:
#         prompt: the user's input text
#         api_key: the user's API key
#
#     Returns:
#         str: the text generated by the model
#     """
#     url = "https://api.deepseek.com/v1/chat/completions"
#
#     headers = {
#         "Content-Type": "application/json",
#         "Authorization": f"Bearer {api_key}",
#         "Accept": "application/json"
#     }
#
#     # payload = {
#     #     "model": "deepseek-reasoner",
#     #     "messages": [
#     #         {"role": "user", "content": prompt}
#     #     ],
#     #     "temperature": 0.3,
#     #     "max_tokens": 2048
#     # }
#     payload = {
#         "model": "deepseek-chat",  # confirm the model name
#         "messages": [
#             {"role": "system", "content": "You are a helpful assistant"},
#             {"role": "user", "content": prompt}
#         ],
#         "temperature": 0.7,  # adjusted to the official default
#         "max_tokens": 2048,
#         "top_p": 1,
#         "frequency_penalty": 0
#     }
#
#     try:
#         response = requests.post(url, headers=headers, data=json.dumps(payload))
#         response.raise_for_status()
#
#         result = response.json()
#         return result['choices'][0]['message']['content']
#     except requests.exceptions.RequestException as e:
#         print(f"API request error: {e}")
#         return None
#     except KeyError:
#         print("Failed to parse the response format")
#         return None
#
#
# if __name__ == "__main__":
#     # Usage example
#     api_key = "sk-..."  # replace with your actual API key
#     user_input = input("Enter your question: ")
#
#     response = deepseek_generation(user_input, api_key)
#
#     if response:
#         print("\nDeepSeek answer:")
#         print(response)
#     else:
#         print("An error occurred while generating the response")

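# The active example below talks to the Volcano Engine Ark endpoint through the
# OpenAI-compatible SDK. Assumed setup (not spelled out in the original script):
# the `openai` package is installed and the key is exported before running, e.g.
#
#   pip install openai
#   export ARK_API_KEY="<your Ark API key>"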
import os

from openai import OpenAI

# Read the Ark API key from the ARK_API_KEY environment variable; never pass
# the raw key string as the argument to os.environ.get().
client = OpenAI(
    api_key=os.environ.get("ARK_API_KEY"),
    base_url="https://ark.cn-beijing.volces.com/api/v3",
)

# Non-streaming:
print("----- standard request -----")
completion = client.chat.completions.create(
    model="deepseek-r1-250120",  # your model endpoint ID
    messages=[
        {"role": "system", "content": "You are an AI assistant"},
        {"role": "user", "content": "Can you put together the real-world filming locations of the TV series 无忧渡 and a travel itinerary for visiting them?"},
    ],
)
print(completion.choices[0].message.content)

# Streaming:
print("----- streaming request -----")
stream = client.chat.completions.create(
    model="deepseek-r1-250120",  # your model endpoint ID
    messages=[
        {"role": "system", "content": "You are an AI assistant"},
        {"role": "user", "content": "Can you put together the real-world filming locations of the TV series 无忧渡 and a travel itinerary for visiting them?"},
    ],
    stream=True,
)

for chunk in stream:
    if not chunk.choices:
        continue
    # delta.content can be None on some chunks, so skip those instead of printing "None"
    if chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
print()

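# A minimal sketch (not part of the original script) of how the requests-based
# deepseek_generation() helper kept above could be rebuilt on top of the Ark
# client, so callers keep the same "prompt in, text out" shape. The name
# ark_generation and the default model ID are illustrative assumptions.
def ark_generation(prompt, model="deepseek-r1-250120"):
    try:
        completion = client.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": "You are an AI assistant"},
                {"role": "user", "content": prompt},
            ],
        )
        return completion.choices[0].message.content
    except Exception as e:
        print(f"API request error: {e}")
        return None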