import sys
from pathlib import Path

# Make the project root importable before loading the local packages.
sys.path.append(str(Path(__file__).parent))

from app import create_app, db
from app.routes import globals
from src.chat import init_model
from src.main import Consultation

app = create_app()

# Initialize the database and create the tables.
with app.app_context():
    db.create_all()

# Initialize the model once at startup and attach it to the Consultation
# object that the route handlers access through the shared `globals` module.
_model, _tokenizer = init_model()
print("Model type:", type(_model))  # should be a transformers model class

consultation = Consultation()
consultation.model = _model
consultation.tokenizer = _tokenizer
print("consultation.model type:", type(consultation.model))  # same as above

globals.consultation = consultation
print("Final check:", globals.consultation.model is not None)  # must be True
print("Model attribute present:", hasattr(globals.consultation, 'model'))
print("Global tokenizer type:", type(globals.consultation.tokenizer))  # should be a tokenizer class

if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True, port=5010, use_reloader=False)
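
# ---------------------------------------------------------------------------
# Reference sketch (assumption): this script expects `init_model` in
# src/chat.py to return a (model, tokenizer) pair loaded with transformers.
# The model path below is a hypothetical placeholder; the real identifier and
# loading arguments live in src/chat.py. Illustrative only:
#
#     from transformers import AutoModelForCausalLM, AutoTokenizer
#
#     def init_model(model_path="<local-path-or-hub-id>"):
#         tokenizer = AutoTokenizer.from_pretrained(model_path)
#         model = AutoModelForCausalLM.from_pretrained(model_path)
#         return model, tokenizer
# ---------------------------------------------------------------------------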