You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Prompt executed in 12.23 seconds
got prompt
vision_config is None, using default vision config
Unused kwargs: ['_load_in_4bit', '_load_in_8bit', 'quant_method']. These kwargs are not used in <class 'transformers.utils.quantization_config.BitsAndBytesConfig'>.
False
'CUDASetup' object has no attribute 'cuda_available'
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
!!! Exception during processing !!! 'NoneType' object has no attribute 'cdequantize_blockwise_fp32'
Traceback (most recent call last):
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 317, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 192, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_MiniCPM-V-2_6-int4\nodes_legacy.py", line 254, in inference
result = self.model.chat(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 380, in chat
res = self.generate(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 256, in generate
) = self.get_vllm_embedding(model_inputs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 117, in get_vllm_embedding
vision_embedding = self.vpm(all_pixel_values, patch_attention_mask=patch_attn_mask, tgt_sizes=tgt_sizes).last_hidden_state
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 918, in forward
encoder_outputs = self.encoder(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 826, in forward
layer_outputs = encoder_layer(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 670, in forward
hidden_states, attn_weights = self.self_attn(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 390, in forward
query_states = self.q_proj(hidden_states)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\nn\modules.py", line 256, in forward
out = bnb.matmul_4bit(x, self.weight.t(), bias=bias, quant_state=self.weight.quant_state)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\autograd_functions.py", line 577, in matmul_4bit
return MatMul4Bit.apply(A, B, out, bias, quant_state)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\autograd\function.py", line 539, in apply
return super().apply(*args, **kwargs) # type: ignore[misc]
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\autograd_functions.py", line 516, in forward
output = torch.nn.functional.linear(A, F.dequantize_4bit(B, quant_state).to(A.dtype).t(), bias)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\functional.py", line 1021, in dequantize_4bit
absmax = dequantize_blockwise(quant_state.absmax, quant_state.state2)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\functional.py", line 822, in dequantize_blockwise
lib.cdequantize_blockwise_fp32(get_ptr(quant_state.code), get_ptr(A), get_ptr(absmax), get_ptr(out), ct.c_int(quant_state.blocksize), ct.c_int(A.numel()))
AttributeError: 'NoneType' object has no attribute 'cdequantize_blockwise_fp32'
Prompt executed in 17.83 seconds
Automatically installing the node shows an error, and when I install the node manually, this error shows up.
I've put the whole MiniCPM repo (https://huggingface.co/openbmb/MiniCPM-V-2_6-int4/tree/main) in /models/prompt_generator,
and I've pip-installed the requirements.txt in /custom_node/minicpm.
The text was updated successfully, but these errors were encountered:
Prompt executed in 12.23 seconds
got prompt
vision_config is None, using default vision config
Unused kwargs: ['_load_in_4bit', '_load_in_8bit', 'quant_method']. These kwargs are not used in <class 'transformers.utils.quantization_config.BitsAndBytesConfig'>.
False
'CUDASetup' object has no attribute 'cuda_available'
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
!!! Exception during processing !!! 'NoneType' object has no attribute 'cdequantize_blockwise_fp32'
Traceback (most recent call last):
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 317, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 192, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 169, in _map_node_over_list
process_inputs(input_dict, i)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\execution.py", line 158, in process_inputs
results.append(getattr(obj, func)(**inputs))
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_MiniCPM-V-2_6-int4\nodes_legacy.py", line 254, in inference
result = self.model.chat(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 380, in chat
res = self.generate(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 256, in generate
) = self.get_vllm_embedding(model_inputs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_minicpmv.py", line 117, in get_vllm_embedding
vision_embedding = self.vpm(all_pixel_values, patch_attention_mask=patch_attn_mask, tgt_sizes=tgt_sizes).last_hidden_state
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 918, in forward
encoder_outputs = self.encoder(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 826, in forward
layer_outputs = encoder_layer(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 670, in forward
hidden_states, attn_weights = self.self_attn(
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3.cache\huggingface\modules\transformers_modules\MiniCPM-V-2_6-int4\modeling_navit_siglip.py", line 390, in forward
query_states = self.q_proj(hidden_states)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\accelerate\hooks.py", line 166, in new_forward
output = module._old_forward(*args, **kwargs)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\nn\modules.py", line 256, in forward
out = bnb.matmul_4bit(x, self.weight.t(), bias=bias, quant_state=self.weight.quant_state)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\autograd_functions.py", line 577, in matmul_4bit
return MatMul4Bit.apply(A, B, out, bias, quant_state)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\torch\autograd\function.py", line 539, in apply
return super().apply(*args, **kwargs) # type: ignore[misc]
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\autograd_functions.py", line 516, in forward
output = torch.nn.functional.linear(A, F.dequantize_4bit(B, quant_state).to(A.dtype).t(), bias)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\functional.py", line 1021, in dequantize_4bit
absmax = dequantize_blockwise(quant_state.absmax, quant_state.state2)
File "D:\Stable DIffusion\comfyui-aki\ComfyUI-aki-v1.3\python\lib\site-packages\bitsandbytes\functional.py", line 822, in dequantize_blockwise
lib.cdequantize_blockwise_fp32(get_ptr(quant_state.code), get_ptr(A), get_ptr(absmax), get_ptr(out), ct.c_int(quant_state.blocksize), ct.c_int(A.numel()))
AttributeError: 'NoneType' object has no attribute 'cdequantize_blockwise_fp32'
Prompt executed in 17.83 seconds
Automatically installing the node shows an error, and when I install the node manually, this error shows up.
I've put the whole MiniCPM repo (https://huggingface.co/openbmb/MiniCPM-V-2_6-int4/tree/main) in /models/prompt_generator,
and I've pip-installed the requirements.txt in /custom_node/minicpm.
The text was updated successfully, but these errors were encountered: