# llm_flask_api.py — minimal Flask proxy in front of a local Ollama LLM endpoint.
  1. import requests
  2. from flask import Flask, request, jsonify
  3. import numpy as np
  4. import pandas as pd
  5. import os
  6. #大模型url
  7. url = "http://localhost:11434/api/generate"
  8. app = Flask(__name__)
  9. @app.route('/llm', methods=['POST'])
  10. def llm():
  11. try:
  12. # mod = request.json.get('model')
  13. mod = "qwen2.5:1.5b"
  14. q = request.json.get('prompt')
  15. payload = {"model": mod, "prompt": q,"stream":False}
  16. response = requests.post(url, json=payload)
  17. response_json = response.json()
  18. response_text = response_json.get('response', '无结果')
  19. return response_text
  20. except Exception as e:
  21. return jsonify({
  22. 'code': 500,
  23. 'msg': str(e)
  24. })
  25. if __name__ == '__main__':
  26. app.run(debug=True, port=11001)