├── .gitignore
├── EmoitonQwen-main
├── emotion_benchmarks
│ ├── DFEW
│ │ ├── Readme.md
│ │ ├── dfew_emotion_DeepSeekVL.py
│ │ ├── dfew_emotion_EmotionQwen.py
│ │ ├── dfew_emotion_InstructBLIP.py
│ │ ├── dfew_emotion_LLaVA.py
│ │ ├── dfew_emotion_Qwen2VL.py
│ │ ├── dfew_emotion_QwenVL.py
│ │ └── model_utils
│ │ │ └── llava
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ ├── conversation.py
│ │ │ ├── eval
│ │ │ ├── eval_gpt_review.py
│ │ │ ├── eval_gpt_review_bench.py
│ │ │ ├── eval_gpt_review_visual.py
│ │ │ ├── eval_pope.py
│ │ │ ├── eval_science_qa.py
│ │ │ ├── eval_science_qa_gpt4.py
│ │ │ ├── eval_science_qa_gpt4_requery.py
│ │ │ ├── eval_textvqa.py
│ │ │ ├── generate_webpage_data_from_table.py
│ │ │ ├── m4c_evaluator.py
│ │ │ ├── model_qa.py
│ │ │ ├── model_vqa.py
│ │ │ ├── model_vqa_loader.py
│ │ │ ├── model_vqa_mmbench.py
│ │ │ ├── model_vqa_science.py
│ │ │ ├── qa_baseline_gpt35.py
│ │ │ ├── run_llava.py
│ │ │ ├── summarize_gpt_review.py
│ │ │ ├── table
│ │ │ │ ├── answer
│ │ │ │ │ ├── answer_alpaca-13b.jsonl
│ │ │ │ │ ├── answer_bard.jsonl
│ │ │ │ │ ├── answer_gpt35.jsonl
│ │ │ │ │ ├── answer_llama-13b.jsonl
│ │ │ │ │ └── answer_vicuna-13b.jsonl
│ │ │ │ ├── caps_boxes_coco2014_val_80.jsonl
│ │ │ │ ├── model.jsonl
│ │ │ │ ├── prompt.jsonl
│ │ │ │ ├── question.jsonl
│ │ │ │ ├── results
│ │ │ │ │ ├── test_sqa_llava_13b_v0.json
│ │ │ │ │ └── test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
│ │ │ │ ├── review
│ │ │ │ │ ├── review_alpaca-13b_vicuna-13b.jsonl
│ │ │ │ │ ├── review_bard_vicuna-13b.jsonl
│ │ │ │ │ ├── review_gpt35_vicuna-13b.jsonl
│ │ │ │ │ └── review_llama-13b_vicuna-13b.jsonl
│ │ │ │ ├── reviewer.jsonl
│ │ │ │ └── rule.json
│ │ │ └── webpage
│ │ │ │ ├── figures
│ │ │ │ ├── alpaca.png
│ │ │ │ ├── bard.jpg
│ │ │ │ ├── chatgpt.svg
│ │ │ │ ├── llama.jpg
│ │ │ │ ├── swords_FILL0_wght300_GRAD0_opsz48.svg
│ │ │ │ └── vicuna.jpeg
│ │ │ │ ├── index.html
│ │ │ │ ├── script.js
│ │ │ │ └── styles.css
│ │ │ ├── mm_utils.py
│ │ │ ├── model
│ │ │ ├── __init__.py
│ │ │ ├── apply_delta.py
│ │ │ ├── builder.py
│ │ │ ├── consolidate.py
│ │ │ ├── language_model
│ │ │ │ ├── llava_llama.py
│ │ │ │ ├── llava_mistral.py
│ │ │ │ └── llava_mpt.py
│ │ │ ├── llava_arch.py
│ │ │ ├── make_delta.py
│ │ │ ├── multimodal_encoder
│ │ │ │ ├── builder.py
│ │ │ │ └── clip_encoder.py
│ │ │ ├── multimodal_projector
│ │ │ │ └── builder.py
│ │ │ └── utils.py
│ │ │ ├── serve
│ │ │ ├── __init__.py
│ │ │ ├── cli.py
│ │ │ ├── controller.py
│ │ │ ├── examples
│ │ │ │ ├── extreme_ironing.jpg
│ │ │ │ └── waterview.jpg
│ │ │ ├── gradio_web_server.py
│ │ │ ├── model_worker.py
│ │ │ ├── register_worker.py
│ │ │ ├── sglang_worker.py
│ │ │ └── test_message.py
│ │ │ ├── train
│ │ │ ├── llama_flash_attn_monkey_patch.py
│ │ │ ├── llama_xformers_attn_monkey_patch.py
│ │ │ ├── llava_trainer.py
│ │ │ ├── train.py
│ │ │ ├── train_mem.py
│ │ │ └── train_xformers.py
│ │ │ └── utils.py
│ ├── EMER
│ │ ├── Readme.md
│ │ ├── evaluate_by_GPT.py
│ │ ├── generate_csv_DeepSeekVL.py
│ │ ├── generate_csv_EmotionQwen.py
│ │ ├── generate_csv_InstructBLIP.py
│ │ ├── generate_csv_LLaVA.py
│ │ ├── generate_csv_Qwen2VL.py
│ │ ├── generate_csv_QwenVL.py
│ │ ├── model_utils
│ │ │ └── llava
│ │ │ │ ├── __init__.py
│ │ │ │ ├── constants.py
│ │ │ │ ├── conversation.py
│ │ │ │ ├── eval
│ │ │ │ ├── eval_gpt_review.py
│ │ │ │ ├── eval_gpt_review_bench.py
│ │ │ │ ├── eval_gpt_review_visual.py
│ │ │ │ ├── eval_pope.py
│ │ │ │ ├── eval_science_qa.py
│ │ │ │ ├── eval_science_qa_gpt4.py
│ │ │ │ ├── eval_science_qa_gpt4_requery.py
│ │ │ │ ├── eval_textvqa.py
│ │ │ │ ├── generate_webpage_data_from_table.py
│ │ │ │ ├── m4c_evaluator.py
│ │ │ │ ├── model_qa.py
│ │ │ │ ├── model_vqa.py
│ │ │ │ ├── model_vqa_loader.py
│ │ │ │ ├── model_vqa_mmbench.py
│ │ │ │ ├── model_vqa_science.py
│ │ │ │ ├── qa_baseline_gpt35.py
│ │ │ │ ├── run_llava.py
│ │ │ │ ├── summarize_gpt_review.py
│ │ │ │ ├── table
│ │ │ │ │ ├── answer
│ │ │ │ │ │ ├── answer_alpaca-13b.jsonl
│ │ │ │ │ │ ├── answer_bard.jsonl
│ │ │ │ │ │ ├── answer_gpt35.jsonl
│ │ │ │ │ │ ├── answer_llama-13b.jsonl
│ │ │ │ │ │ └── answer_vicuna-13b.jsonl
│ │ │ │ │ ├── caps_boxes_coco2014_val_80.jsonl
│ │ │ │ │ ├── model.jsonl
│ │ │ │ │ ├── prompt.jsonl
│ │ │ │ │ ├── question.jsonl
│ │ │ │ │ ├── results
│ │ │ │ │ │ ├── test_sqa_llava_13b_v0.json
│ │ │ │ │ │ └── test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
│ │ │ │ │ ├── review
│ │ │ │ │ │ ├── review_alpaca-13b_vicuna-13b.jsonl
│ │ │ │ │ │ ├── review_bard_vicuna-13b.jsonl
│ │ │ │ │ │ ├── review_gpt35_vicuna-13b.jsonl
│ │ │ │ │ │ └── review_llama-13b_vicuna-13b.jsonl
│ │ │ │ │ ├── reviewer.jsonl
│ │ │ │ │ └── rule.json
│ │ │ │ └── webpage
│ │ │ │ │ ├── figures
│ │ │ │ │ ├── alpaca.png
│ │ │ │ │ ├── bard.jpg
│ │ │ │ │ ├── chatgpt.svg
│ │ │ │ │ ├── llama.jpg
│ │ │ │ │ ├── swords_FILL0_wght300_GRAD0_opsz48.svg
│ │ │ │ │ └── vicuna.jpeg
│ │ │ │ │ ├── index.html
│ │ │ │ │ ├── script.js
│ │ │ │ │ └── styles.css
│ │ │ │ ├── mm_utils.py
│ │ │ │ ├── model
│ │ │ │ ├── __init__.py
│ │ │ │ ├── apply_delta.py
│ │ │ │ ├── builder.py
│ │ │ │ ├── consolidate.py
│ │ │ │ ├── language_model
│ │ │ │ │ ├── llava_llama.py
│ │ │ │ │ ├── llava_mistral.py
│ │ │ │ │ └── llava_mpt.py
│ │ │ │ ├── llava_arch.py
│ │ │ │ ├── make_delta.py
│ │ │ │ ├── multimodal_encoder
│ │ │ │ │ ├── builder.py
│ │ │ │ │ └── clip_encoder.py
│ │ │ │ ├── multimodal_projector
│ │ │ │ │ └── builder.py
│ │ │ │ └── utils.py
│ │ │ │ ├── serve
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cli.py
│ │ │ │ ├── controller.py
│ │ │ │ ├── examples
│ │ │ │ │ ├── extreme_ironing.jpg
│ │ │ │ │ └── waterview.jpg
│ │ │ │ ├── gradio_web_server.py
│ │ │ │ ├── model_worker.py
│ │ │ │ ├── register_worker.py
│ │ │ │ ├── sglang_worker.py
│ │ │ │ └── test_message.py
│ │ │ │ ├── train
│ │ │ │ ├── llama_flash_attn_monkey_patch.py
│ │ │ │ ├── llama_xformers_attn_monkey_patch.py
│ │ │ │ ├── llava_trainer.py
│ │ │ │ ├── train.py
│ │ │ │ ├── train_mem.py
│ │ │ │ └── train_xformers.py
│ │ │ │ └── utils.py
│ │ └── results
│ │ │ ├── gt-chi.csv
│ │ │ └── gt-eng.csv
│ └── MER2024
│ │ ├── MER2024_DeepSeekVL.py
│ │ ├── MER2024_InstructBLIP.py
│ │ ├── MER2024_LLaVA.py
│ │ ├── MER2024_Qwen2VL.py
│ │ ├── MER2024_QwenVL.py
│ │ ├── MER2024_emotion_qwen.py
│ │ ├── Readme.md
│ │ └── model_utils
│ │ └── llava
│ │ ├── __init__.py
│ │ ├── constants.py
│ │ ├── conversation.py
│ │ ├── eval
│ │ ├── eval_gpt_review.py
│ │ ├── eval_gpt_review_bench.py
│ │ ├── eval_gpt_review_visual.py
│ │ ├── eval_pope.py
│ │ ├── eval_science_qa.py
│ │ ├── eval_science_qa_gpt4.py
│ │ ├── eval_science_qa_gpt4_requery.py
│ │ ├── eval_textvqa.py
│ │ ├── generate_webpage_data_from_table.py
│ │ ├── m4c_evaluator.py
│ │ ├── model_qa.py
│ │ ├── model_vqa.py
│ │ ├── model_vqa_loader.py
│ │ ├── model_vqa_mmbench.py
│ │ ├── model_vqa_science.py
│ │ ├── qa_baseline_gpt35.py
│ │ ├── run_llava.py
│ │ ├── summarize_gpt_review.py
│ │ ├── table
│ │ │ ├── answer
│ │ │ │ ├── answer_alpaca-13b.jsonl
│ │ │ │ ├── answer_bard.jsonl
│ │ │ │ ├── answer_gpt35.jsonl
│ │ │ │ ├── answer_llama-13b.jsonl
│ │ │ │ └── answer_vicuna-13b.jsonl
│ │ │ ├── caps_boxes_coco2014_val_80.jsonl
│ │ │ ├── model.jsonl
│ │ │ ├── prompt.jsonl
│ │ │ ├── question.jsonl
│ │ │ ├── results
│ │ │ │ ├── test_sqa_llava_13b_v0.json
│ │ │ │ └── test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
│ │ │ ├── review
│ │ │ │ ├── review_alpaca-13b_vicuna-13b.jsonl
│ │ │ │ ├── review_bard_vicuna-13b.jsonl
│ │ │ │ ├── review_gpt35_vicuna-13b.jsonl
│ │ │ │ └── review_llama-13b_vicuna-13b.jsonl
│ │ │ ├── reviewer.jsonl
│ │ │ └── rule.json
│ │ └── webpage
│ │ │ ├── figures
│ │ │ ├── alpaca.png
│ │ │ ├── bard.jpg
│ │ │ ├── chatgpt.svg
│ │ │ ├── llama.jpg
│ │ │ ├── swords_FILL0_wght300_GRAD0_opsz48.svg
│ │ │ └── vicuna.jpeg
│ │ │ ├── index.html
│ │ │ ├── script.js
│ │ │ └── styles.css
│ │ ├── mm_utils.py
│ │ ├── model
│ │ ├── __init__.py
│ │ ├── apply_delta.py
│ │ ├── builder.py
│ │ ├── consolidate.py
│ │ ├── language_model
│ │ │ ├── llava_llama.py
│ │ │ ├── llava_mistral.py
│ │ │ └── llava_mpt.py
│ │ ├── llava_arch.py
│ │ ├── make_delta.py
│ │ ├── multimodal_encoder
│ │ │ ├── builder.py
│ │ │ └── clip_encoder.py
│ │ ├── multimodal_projector
│ │ │ └── builder.py
│ │ └── utils.py
│ │ ├── serve
│ │ ├── __init__.py
│ │ ├── cli.py
│ │ ├── controller.py
│ │ ├── examples
│ │ │ ├── extreme_ironing.jpg
│ │ │ └── waterview.jpg
│ │ ├── gradio_web_server.py
│ │ ├── model_worker.py
│ │ ├── register_worker.py
│ │ ├── sglang_worker.py
│ │ └── test_message.py
│ │ ├── train
│ │ ├── llama_flash_attn_monkey_patch.py
│ │ ├── llama_xformers_attn_monkey_patch.py
│ │ ├── llava_trainer.py
│ │ ├── train.py
│ │ ├── train_mem.py
│ │ └── train_xformers.py
│ │ └── utils.py
├── environment.yaml
├── eval_data
│ ├── DFEW
│ │ └── test(single-labeled)
│ │ │ ├── set_1.csv
│ │ │ ├── set_2.csv
│ │ │ ├── set_3.csv
│ │ │ ├── set_4.csv
│ │ │ └── set_5.csv
│ ├── EMER
│ │ └── dataset-v1
│ │ │ ├── gt-chi.csv
│ │ │ └── gt-eng.csv
│ ├── EMOSET_val
│ │ └── dataset_infos.json
│ └── MER2024
│ │ ├── reference-noise.csv
│ │ └── reference-semi.csv
├── general_benchmarks.yaml
├── general_benchmarks
│ ├── .gitignore
│ ├── Readme.md
│ ├── __init__.py
│ ├── datasets
│ │ ├── __init__.py
│ │ ├── base_eval_dataset.py
│ │ ├── emoset.py
│ │ ├── mmbench.py
│ │ ├── mme.py
│ │ ├── pope.py
│ │ ├── scienceqa.py
│ │ ├── seedbench.py
│ │ ├── textvqa.py
│ │ └── vqav2.py
│ ├── evaluate.py
│ └── models
│ │ ├── __init__.py
│ │ ├── base_model.py
│ │ ├── deepseek_vl.py
│ │ ├── emotion_qwen.py
│ │ ├── gpt4v.py
│ │ ├── instructblip.py
│ │ ├── llava_model.py
│ │ ├── minicpm_v.py
│ │ ├── model_utils
│ │ ├── llava
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ ├── conversation.py
│ │ │ ├── eval
│ │ │ │ ├── eval_gpt_review.py
│ │ │ │ ├── eval_gpt_review_bench.py
│ │ │ │ ├── eval_gpt_review_visual.py
│ │ │ │ ├── eval_pope.py
│ │ │ │ ├── eval_science_qa.py
│ │ │ │ ├── eval_science_qa_gpt4.py
│ │ │ │ ├── eval_science_qa_gpt4_requery.py
│ │ │ │ ├── eval_textvqa.py
│ │ │ │ ├── generate_webpage_data_from_table.py
│ │ │ │ ├── m4c_evaluator.py
│ │ │ │ ├── model_qa.py
│ │ │ │ ├── model_vqa.py
│ │ │ │ ├── model_vqa_loader.py
│ │ │ │ ├── model_vqa_mmbench.py
│ │ │ │ ├── model_vqa_science.py
│ │ │ │ ├── qa_baseline_gpt35.py
│ │ │ │ ├── run_llava.py
│ │ │ │ ├── summarize_gpt_review.py
│ │ │ │ ├── table
│ │ │ │ │ ├── answer
│ │ │ │ │ │ ├── answer_alpaca-13b.jsonl
│ │ │ │ │ │ ├── answer_bard.jsonl
│ │ │ │ │ │ ├── answer_gpt35.jsonl
│ │ │ │ │ │ ├── answer_llama-13b.jsonl
│ │ │ │ │ │ └── answer_vicuna-13b.jsonl
│ │ │ │ │ ├── caps_boxes_coco2014_val_80.jsonl
│ │ │ │ │ ├── model.jsonl
│ │ │ │ │ ├── prompt.jsonl
│ │ │ │ │ ├── question.jsonl
│ │ │ │ │ ├── results
│ │ │ │ │ │ ├── test_sqa_llava_13b_v0.json
│ │ │ │ │ │ └── test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
│ │ │ │ │ ├── review
│ │ │ │ │ │ ├── review_alpaca-13b_vicuna-13b.jsonl
│ │ │ │ │ │ ├── review_bard_vicuna-13b.jsonl
│ │ │ │ │ │ ├── review_gpt35_vicuna-13b.jsonl
│ │ │ │ │ │ └── review_llama-13b_vicuna-13b.jsonl
│ │ │ │ │ ├── reviewer.jsonl
│ │ │ │ │ └── rule.json
│ │ │ │ └── webpage
│ │ │ │ │ ├── figures
│ │ │ │ │ ├── alpaca.png
│ │ │ │ │ ├── bard.jpg
│ │ │ │ │ ├── chatgpt.svg
│ │ │ │ │ ├── llama.jpg
│ │ │ │ │ ├── swords_FILL0_wght300_GRAD0_opsz48.svg
│ │ │ │ │ └── vicuna.jpeg
│ │ │ │ │ ├── index.html
│ │ │ │ │ ├── script.js
│ │ │ │ │ └── styles.css
│ │ │ ├── mm_utils.py
│ │ │ ├── model
│ │ │ │ ├── __init__.py
│ │ │ │ ├── apply_delta.py
│ │ │ │ ├── builder.py
│ │ │ │ ├── consolidate.py
│ │ │ │ ├── language_model
│ │ │ │ │ ├── llava_llama.py
│ │ │ │ │ ├── llava_mistral.py
│ │ │ │ │ └── llava_mpt.py
│ │ │ │ ├── llava_arch.py
│ │ │ │ ├── make_delta.py
│ │ │ │ ├── multimodal_encoder
│ │ │ │ │ ├── builder.py
│ │ │ │ │ └── clip_encoder.py
│ │ │ │ ├── multimodal_projector
│ │ │ │ │ └── builder.py
│ │ │ │ └── utils.py
│ │ │ ├── serve
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cli.py
│ │ │ │ ├── controller.py
│ │ │ │ ├── examples
│ │ │ │ │ ├── extreme_ironing.jpg
│ │ │ │ │ └── waterview.jpg
│ │ │ │ ├── gradio_web_server.py
│ │ │ │ ├── model_worker.py
│ │ │ │ ├── register_worker.py
│ │ │ │ ├── sglang_worker.py
│ │ │ │ └── test_message.py
│ │ │ ├── train
│ │ │ │ ├── llama_flash_attn_monkey_patch.py
│ │ │ │ ├── llama_xformers_attn_monkey_patch.py
│ │ │ │ ├── llava_trainer.py
│ │ │ │ ├── train.py
│ │ │ │ ├── train_mem.py
│ │ │ │ └── train_xformers.py
│ │ │ └── utils.py
│ │ └── qwen_vl_utils
│ │ │ ├── __init__.py
│ │ │ └── vision_process.py
│ │ ├── mplug_owl.py
│ │ ├── otter_image.py
│ │ ├── otter_video.py
│ │ ├── qwen2_vl.py
│ │ ├── qwen_vl.py
│ │ └── video_chat.py
├── infer
│ └── EmotionQwen_infer.py
└── train
│ ├── README.md
│ ├── __init__.py
│ ├── dataset.py
│ ├── ds_config_zero2.json
│ ├── ds_config_zero3.json
│ ├── finetune.py
│ ├── finetune_ds.sh
│ ├── finetune_lora.sh
│ ├── merge_lora.py
│ ├── trainer.py
│ └── vision_process.py
├── LICENSE
├── README.md
└── images
├── Emotion-Qwen.png
└── sample1.jpg

/.gitignore: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/.gitignore
/EmoitonQwen-main/emotion_benchmarks/DFEW/Readme.md: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/Readme.md
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_DeepSeekVL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_DeepSeekVL.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_EmotionQwen.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_EmotionQwen.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_InstructBLIP.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_InstructBLIP.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_LLaVA.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_LLaVA.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_Qwen2VL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_Qwen2VL.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_QwenVL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/dfew_emotion_QwenVL.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/__init__.py:
    from .model import LlavaLlamaForCausalLM
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/constants.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/constants.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/conversation.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/conversation.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review_bench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review_bench.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review_visual.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_gpt_review_visual.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_pope.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_pope.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa_gpt4.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa_gpt4.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa_gpt4_requery.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_science_qa_gpt4_requery.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_textvqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/eval_textvqa.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/generate_webpage_data_from_table.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/generate_webpage_data_from_table.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/m4c_evaluator.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/m4c_evaluator.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_qa.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_loader.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_loader.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_mmbench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_mmbench.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_science.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/model_vqa_science.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/qa_baseline_gpt35.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/qa_baseline_gpt35.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/run_llava.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/run_llava.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/summarize_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/summarize_gpt_review.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_bard.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_bard.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_gpt35.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_gpt35.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/model.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/model.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/prompt.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/prompt.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/question.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/question.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/reviewer.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/reviewer.jsonl
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/rule.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/table/rule.json
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/alpaca.png: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/alpaca.png
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/bard.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/bard.jpg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/chatgpt.svg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/chatgpt.svg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/llama.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/llama.jpg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/vicuna.jpeg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/figures/vicuna.jpeg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/index.html: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/index.html
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/script.js: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/script.js
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/styles.css: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/eval/webpage/styles.css
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/mm_utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/mm_utils.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/__init__.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/__init__.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/apply_delta.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/apply_delta.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/builder.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/consolidate.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/consolidate.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_llama.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_llama.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_mistral.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_mistral.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_mpt.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/language_model/llava_mpt.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/llava_arch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/llava_arch.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/make_delta.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/make_delta.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_encoder/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_encoder/builder.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_encoder/clip_encoder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_encoder/clip_encoder.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_projector/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/multimodal_projector/builder.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/model/utils.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/__init__.py:
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/cli.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/cli.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/controller.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/controller.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/examples/extreme_ironing.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/examples/extreme_ironing.jpg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/examples/waterview.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/examples/waterview.jpg
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/gradio_web_server.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/gradio_web_server.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/model_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/model_worker.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/register_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/register_worker.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/sglang_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/sglang_worker.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/test_message.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/serve/test_message.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llama_flash_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llama_flash_attn_monkey_patch.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llama_xformers_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llama_xformers_attn_monkey_patch.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llava_trainer.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/llava_trainer.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train_mem.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train_mem.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train_xformers.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/train/train_xformers.py
/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/DFEW/model_utils/llava/utils.py
/EmoitonQwen-main/emotion_benchmarks/EMER/Readme.md: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/Readme.md
/EmoitonQwen-main/emotion_benchmarks/EMER/evaluate_by_GPT.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/evaluate_by_GPT.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_DeepSeekVL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_DeepSeekVL.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_EmotionQwen.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_EmotionQwen.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_InstructBLIP.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_InstructBLIP.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_LLaVA.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_LLaVA.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_Qwen2VL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_Qwen2VL.py
/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_QwenVL.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/generate_csv_QwenVL.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/__init__.py:
    from .model import LlavaLlamaForCausalLM
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/constants.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/constants.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/conversation.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/conversation.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review_bench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review_bench.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review_visual.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_gpt_review_visual.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_pope.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_pope.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa_gpt4.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa_gpt4.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa_gpt4_requery.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_science_qa_gpt4_requery.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_textvqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/eval_textvqa.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/generate_webpage_data_from_table.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/generate_webpage_data_from_table.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/m4c_evaluator.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/m4c_evaluator.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_qa.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_loader.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_loader.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_mmbench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_mmbench.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_science.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/model_vqa_science.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/qa_baseline_gpt35.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/qa_baseline_gpt35.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/run_llava.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/run_llava.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/summarize_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/summarize_gpt_review.py
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_bard.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_bard.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_gpt35.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_gpt35.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/model.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/model.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/prompt.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/prompt.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/question.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/question.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/reviewer.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/reviewer.jsonl
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/rule.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/table/rule.json
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/alpaca.png: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/alpaca.png
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/bard.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/bard.jpg
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/chatgpt.svg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/chatgpt.svg
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/llama.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/llama.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/vicuna.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/figures/vicuna.jpeg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/index.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/index.html -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/script.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/script.js -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/styles.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/eval/webpage/styles.css -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/mm_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/mm_utils.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/__init__.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/apply_delta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/apply_delta.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/builder.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/consolidate.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/consolidate.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_llama.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_llama.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_mistral.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_mistral.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_mpt.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/language_model/llava_mpt.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/llava_arch.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/llava_arch.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/make_delta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/make_delta.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_encoder/builder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_encoder/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_encoder/clip_encoder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_encoder/clip_encoder.py 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_projector/builder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/multimodal_projector/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/model/utils.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/cli.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/cli.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/controller.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/controller.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/examples/extreme_ironing.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/examples/extreme_ironing.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/examples/waterview.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/examples/waterview.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/gradio_web_server.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/gradio_web_server.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/model_worker.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/model_worker.py -------------------------------------------------------------------------------- 
/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/register_worker.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/register_worker.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/sglang_worker.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/sglang_worker.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/test_message.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/serve/test_message.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llama_flash_attn_monkey_patch.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llama_flash_attn_monkey_patch.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llama_xformers_attn_monkey_patch.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llama_xformers_attn_monkey_patch.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llava_trainer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/llava_trainer.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train_mem.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train_mem.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train_xformers.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/train/train_xformers.py 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/model_utils/llava/utils.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/results/gt-chi.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/results/gt-chi.csv -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/EMER/results/gt-eng.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/EMER/results/gt-eng.csv -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_DeepSeekVL.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_DeepSeekVL.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_InstructBLIP.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_InstructBLIP.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_LLaVA.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_LLaVA.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_Qwen2VL.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_Qwen2VL.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_QwenVL.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_QwenVL.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_emotion_qwen.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/MER2024_emotion_qwen.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/Readme.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/Readme.md 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/__init__.py: -------------------------------------------------------------------------------- 1 | from .model import LlavaLlamaForCausalLM 2 | -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/constants.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/constants.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/conversation.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/conversation.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review_bench.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review_bench.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review_visual.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_gpt_review_visual.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_pope.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_pope.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa_gpt4.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa_gpt4.py 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa_gpt4_requery.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_science_qa_gpt4_requery.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_textvqa.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/eval_textvqa.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/generate_webpage_data_from_table.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/generate_webpage_data_from_table.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/m4c_evaluator.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/m4c_evaluator.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_qa.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_qa.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_loader.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_loader.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_mmbench.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_mmbench.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_science.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/model_vqa_science.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/qa_baseline_gpt35.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/qa_baseline_gpt35.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/run_llava.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/run_llava.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/summarize_gpt_review.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/summarize_gpt_review.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_bard.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_bard.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_gpt35.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_gpt35.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/model.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/model.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/prompt.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/prompt.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/question.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/question.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl 
-------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/reviewer.jsonl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/reviewer.jsonl -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/rule.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/table/rule.json -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/alpaca.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/alpaca.png -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/bard.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/bard.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/chatgpt.svg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/chatgpt.svg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/llama.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/llama.jpg -------------------------------------------------------------------------------- 
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/vicuna.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/figures/vicuna.jpeg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/index.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/index.html -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/script.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/script.js -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/styles.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/eval/webpage/styles.css -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/mm_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/mm_utils.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/__init__.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/apply_delta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/apply_delta.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/builder.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/consolidate.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/consolidate.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_llama.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_llama.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_mistral.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_mistral.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_mpt.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/language_model/llava_mpt.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/llava_arch.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/llava_arch.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/make_delta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/make_delta.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_encoder/builder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_encoder/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_encoder/clip_encoder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_encoder/clip_encoder.py -------------------------------------------------------------------------------- 
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_projector/builder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/multimodal_projector/builder.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/model/utils.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/cli.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/cli.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/controller.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/controller.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/examples/extreme_ironing.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/examples/extreme_ironing.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/examples/waterview.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/examples/waterview.jpg -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/gradio_web_server.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/gradio_web_server.py -------------------------------------------------------------------------------- /EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/model_worker.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/model_worker.py -------------------------------------------------------------------------------- 
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/register_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/register_worker.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/sglang_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/sglang_worker.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/test_message.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/serve/test_message.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llama_flash_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llama_flash_attn_monkey_patch.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llama_xformers_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llama_xformers_attn_monkey_patch.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llava_trainer.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/llava_trainer.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train_mem.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train_mem.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train_xformers.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/train/train_xformers.py
/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/emotion_benchmarks/MER2024/model_utils/llava/utils.py
/EmoitonQwen-main/environment.yaml: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/environment.yaml
/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_1.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_1.csv
/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_2.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_2.csv
/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_3.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_3.csv
/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_4.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_4.csv
/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_5.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/DFEW/test(single-labeled)/set_5.csv
/EmoitonQwen-main/eval_data/EMER/dataset-v1/gt-chi.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/EMER/dataset-v1/gt-chi.csv
/EmoitonQwen-main/eval_data/EMER/dataset-v1/gt-eng.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/EMER/dataset-v1/gt-eng.csv
/EmoitonQwen-main/eval_data/EMOSET_val/dataset_infos.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/EMOSET_val/dataset_infos.json
/EmoitonQwen-main/eval_data/MER2024/reference-noise.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/MER2024/reference-noise.csv
/EmoitonQwen-main/eval_data/MER2024/reference-semi.csv: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/eval_data/MER2024/reference-semi.csv
/EmoitonQwen-main/general_benchmarks.yaml: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks.yaml
/EmoitonQwen-main/general_benchmarks/.gitignore: 1 | config.yaml
/EmoitonQwen-main/general_benchmarks/Readme.md: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/Readme.md
/EmoitonQwen-main/general_benchmarks/__init__.py: 1 |
/EmoitonQwen-main/general_benchmarks/datasets/__init__.py: 1 |
/EmoitonQwen-main/general_benchmarks/datasets/base_eval_dataset.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/base_eval_dataset.py
/EmoitonQwen-main/general_benchmarks/datasets/emoset.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/emoset.py
/EmoitonQwen-main/general_benchmarks/datasets/mmbench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/mmbench.py
/EmoitonQwen-main/general_benchmarks/datasets/mme.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/mme.py
/EmoitonQwen-main/general_benchmarks/datasets/pope.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/pope.py
/EmoitonQwen-main/general_benchmarks/datasets/scienceqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/scienceqa.py
/EmoitonQwen-main/general_benchmarks/datasets/seedbench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/seedbench.py
/EmoitonQwen-main/general_benchmarks/datasets/textvqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/textvqa.py
/EmoitonQwen-main/general_benchmarks/datasets/vqav2.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/datasets/vqav2.py
/EmoitonQwen-main/general_benchmarks/evaluate.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/evaluate.py
/EmoitonQwen-main/general_benchmarks/models/__init__.py: 1 |
/EmoitonQwen-main/general_benchmarks/models/base_model.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/base_model.py
/EmoitonQwen-main/general_benchmarks/models/deepseek_vl.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/deepseek_vl.py
/EmoitonQwen-main/general_benchmarks/models/emotion_qwen.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/emotion_qwen.py
/EmoitonQwen-main/general_benchmarks/models/gpt4v.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/gpt4v.py
/EmoitonQwen-main/general_benchmarks/models/instructblip.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/instructblip.py
/EmoitonQwen-main/general_benchmarks/models/llava_model.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/llava_model.py
/EmoitonQwen-main/general_benchmarks/models/minicpm_v.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/minicpm_v.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/__init__.py: 1 | from .model import LlavaLlamaForCausalLM 2 |
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/constants.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/constants.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/conversation.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/conversation.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review_bench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review_bench.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review_visual.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_gpt_review_visual.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_pope.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_pope.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa_gpt4.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa_gpt4.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa_gpt4_requery.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_science_qa_gpt4_requery.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_textvqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/eval_textvqa.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/generate_webpage_data_from_table.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/generate_webpage_data_from_table.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/m4c_evaluator.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/m4c_evaluator.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_qa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_qa.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_loader.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_loader.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_mmbench.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_mmbench.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_science.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/model_vqa_science.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/qa_baseline_gpt35.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/qa_baseline_gpt35.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/run_llava.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/run_llava.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/summarize_gpt_review.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/summarize_gpt_review.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_alpaca-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_bard.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_bard.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_gpt35.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_gpt35.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_llama-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/answer/answer_vicuna-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/caps_boxes_coco2014_val_80.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/model.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/model.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/prompt.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/prompt.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/question.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/question.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/results/test_sqa_llava_13b_v0.json
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/results/test_sqa_llava_lcs_558k_sqa_12e_vicuna_v1_3_13b.json
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_alpaca-13b_vicuna-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_bard_vicuna-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_gpt35_vicuna-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/review/review_llama-13b_vicuna-13b.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/reviewer.jsonl: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/reviewer.jsonl
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/rule.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/table/rule.json
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/alpaca.png: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/alpaca.png
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/bard.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/bard.jpg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/chatgpt.svg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/chatgpt.svg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/llama.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/llama.jpg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/swords_FILL0_wght300_GRAD0_opsz48.svg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/vicuna.jpeg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/figures/vicuna.jpeg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/index.html: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/index.html
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/script.js: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/script.js
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/styles.css: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/eval/webpage/styles.css
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/mm_utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/mm_utils.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/__init__.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/__init__.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/apply_delta.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/apply_delta.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/builder.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/consolidate.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/consolidate.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_llama.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_llama.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_mistral.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_mistral.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_mpt.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/language_model/llava_mpt.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/llava_arch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/llava_arch.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/make_delta.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/make_delta.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_encoder/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_encoder/builder.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_encoder/clip_encoder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_encoder/clip_encoder.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_projector/builder.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/multimodal_projector/builder.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/model/utils.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/__init__.py: 1 |
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/cli.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/cli.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/controller.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/controller.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/examples/extreme_ironing.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/examples/extreme_ironing.jpg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/examples/waterview.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/examples/waterview.jpg
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/gradio_web_server.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/gradio_web_server.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/model_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/model_worker.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/register_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/register_worker.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/sglang_worker.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/sglang_worker.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/test_message.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/serve/test_message.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llama_flash_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llama_flash_attn_monkey_patch.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llama_xformers_attn_monkey_patch.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llama_xformers_attn_monkey_patch.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llava_trainer.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/llava_trainer.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train_mem.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train_mem.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train_xformers.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/train/train_xformers.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/utils.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/llava/utils.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/qwen_vl_utils/__init__.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/qwen_vl_utils/__init__.py
/EmoitonQwen-main/general_benchmarks/models/model_utils/qwen_vl_utils/vision_process.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/model_utils/qwen_vl_utils/vision_process.py
/EmoitonQwen-main/general_benchmarks/models/mplug_owl.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/mplug_owl.py
/EmoitonQwen-main/general_benchmarks/models/otter_image.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/otter_image.py
/EmoitonQwen-main/general_benchmarks/models/otter_video.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/otter_video.py
/EmoitonQwen-main/general_benchmarks/models/qwen2_vl.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/qwen2_vl.py
/EmoitonQwen-main/general_benchmarks/models/qwen_vl.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/qwen_vl.py
/EmoitonQwen-main/general_benchmarks/models/video_chat.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/general_benchmarks/models/video_chat.py
/EmoitonQwen-main/infer/EmotionQwen_infer.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/infer/EmotionQwen_infer.py
/EmoitonQwen-main/train/README.md: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/README.md
/EmoitonQwen-main/train/__init__.py: 1 |
/EmoitonQwen-main/train/dataset.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/dataset.py
/EmoitonQwen-main/train/ds_config_zero2.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/ds_config_zero2.json
/EmoitonQwen-main/train/ds_config_zero3.json: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/ds_config_zero3.json
/EmoitonQwen-main/train/finetune.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/finetune.py
/EmoitonQwen-main/train/finetune_ds.sh: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/finetune_ds.sh
/EmoitonQwen-main/train/finetune_lora.sh: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/finetune_lora.sh
/EmoitonQwen-main/train/merge_lora.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/merge_lora.py
/EmoitonQwen-main/train/trainer.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/trainer.py
/EmoitonQwen-main/train/vision_process.py: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/EmoitonQwen-main/train/vision_process.py
/LICENSE: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/LICENSE
/README.md: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/README.md
/images/Emotion-Qwen.png: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/images/Emotion-Qwen.png
/images/sample1.jpg: https://raw.githubusercontent.com/24DavidHuang/Emotion-Qwen/HEAD/images/sample1.jpg
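The manifest above lists /EmoitonQwen-main/infer/EmotionQwen_infer.py together with a vendored qwen_vl_utils/vision_process.py, which suggests the checkpoint is driven through the standard Qwen2-VL interface in Hugging Face transformers. The following is only a minimal sketch of that common loading pattern, not the repository's actual script; the checkpoint path, image path, and prompt are placeholders, and Emotion-Qwen may instead require trust_remote_code=True or its own model class.

```python
# Hypothetical inference sketch in the Qwen2-VL style; not the repo's EmotionQwen_infer.py.
import torch
from transformers import AutoProcessor, Qwen2VLForConditionalGeneration
from qwen_vl_utils import process_vision_info  # the repo ships a similar vision_process module

MODEL_PATH = "path/to/Emotion-Qwen"  # placeholder checkpoint directory

model = Qwen2VLForConditionalGeneration.from_pretrained(
    MODEL_PATH, torch_dtype=torch.bfloat16, device_map="auto"
)
processor = AutoProcessor.from_pretrained(MODEL_PATH)

messages = [{
    "role": "user",
    "content": [
        {"type": "image", "image": "file:///path/to/face.jpg"},  # placeholder image
        {"type": "text", "text": "What emotion is this person expressing?"},
    ],
}]

# Build the chat prompt and preprocess the visual inputs.
text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
image_inputs, video_inputs = process_vision_info(messages)
inputs = processor(
    text=[text], images=image_inputs, videos=video_inputs,
    padding=True, return_tensors="pt",
).to(model.device)

# Generate and strip the prompt tokens before decoding.
generated = model.generate(**inputs, max_new_tokens=128)
trimmed = [out[len(inp):] for inp, out in zip(inputs.input_ids, generated)]
print(processor.batch_decode(trimmed, skip_special_tokens=True)[0])
```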
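Likewise, /EmoitonQwen-main/train/finetune_lora.sh followed by /EmoitonQwen-main/train/merge_lora.py points to LoRA fine-tuning with a subsequent adapter merge. Below is a rough sketch of the usual PEFT merge step, assuming the adapters were saved with the peft library; all paths are placeholders, and the real merge_lora.py may handle the processor, vision modules, or sharding differently.

```python
# Hypothetical LoRA merge sketch using PEFT; not taken from the repo's merge_lora.py.
from peft import PeftModel
from transformers import AutoProcessor, Qwen2VLForConditionalGeneration

BASE = "path/to/base-checkpoint"      # placeholder: base model used for LoRA training
ADAPTER = "path/to/lora-adapter"      # placeholder: output directory of finetune_lora.sh
OUTPUT = "path/to/merged-model"       # placeholder: destination for the merged weights

base = Qwen2VLForConditionalGeneration.from_pretrained(BASE, torch_dtype="auto")
model = PeftModel.from_pretrained(base, ADAPTER)

# Fold the LoRA deltas into the base weights and drop the adapter wrappers.
merged = model.merge_and_unload()
merged.save_pretrained(OUTPUT)

# Save the processor alongside the merged weights so the output loads standalone.
AutoProcessor.from_pretrained(BASE).save_pretrained(OUTPUT)
```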