runtime error
Exit code: 1. Reason: le
  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
  File "/usr/local/lib/python3.10/site-packages/transformers/models/llama/modeling_llama.py", line 55, in <module>
    from flash_attn import flash_attn_func, flash_attn_varlen_func
  File "/usr/local/lib/python3.10/site-packages/flash_attn/__init__.py", line 3, in <module>
    from flash_attn.flash_attn_interface import (
  File "/usr/local/lib/python3.10/site-packages/flash_attn/flash_attn_interface.py", line 15, in <module>
    import flash_attn_2_cuda as flash_attn_gpu
ModuleNotFoundError: No module named 'flash_attn_2_cuda'

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 16, in <module>
    from oryx.conversation import conv_templates, SeparatorStyle
  File "/home/user/app/oryx/__init__.py", line 1, in <module>
    from .model import OryxLlamaForCausalLM
  File "/home/user/app/oryx/model/__init__.py", line 13, in <module>
    raise e
  File "/home/user/app/oryx/model/__init__.py", line 11, in <module>
    exec(f"from .language_model.{model_name} import {model_classes}")
  File "<string>", line 1, in <module>
  File "/home/user/app/oryx/model/language_model/oryx_llama.py", line 6, in <module>
    from transformers import AutoConfig, AutoModelForCausalLM, \
  File "<frozen importlib._bootstrap>", line 1075, in _handle_fromlist
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1463, in __getattr__
    value = getattr(module, name)
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1462, in __getattr__
    module = self._get_module(self._class_to_module[name])
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1474, in _get_module
    raise RuntimeError(
RuntimeError: Failed to import transformers.models.llama.modeling_llama because of the following error (look up to see its traceback):
No module named 'flash_attn_2_cuda'
Container logs:
Fetching error logs...