@@ -356,7 +356,7 @@
 # create dragon models
 {"model_name": "llmware/dragon-yi-6b-v0", "display_name": "dragon-yi-6b",
  "model_family": "HFGenerativeModel", "model_category": "generative_local", "model_location": "hf_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "temperature": 0.3, "trailing_space": "\n", "link": "https://huggingface.co/llmware/dragon-yi-6b-v0",
  "custom_model_files": [], "custom_model_repo": "",
  "hf_repo": "llmware/dragon-yi-6b-v0"},
@@ -370,7 +370,7 @@

 {"model_name": "llmware/dragon-mistral-7b-v0", "display_name": "dragon-mistral-7b",
  "model_family": "HFGenerativeModel", "model_category": "generative_local", "model_location": "hf_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "temperature": 0.3, "trailing_space": "", "link": "https://huggingface.co/llmware/dragon-mistral-7b-v0",
  "custom_model_files": [], "custom_model_repo": "",
  "hf_repo": "llmware/dragon-mistral-7b-v0"},
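For context, the dragon entries above are resolved by name through llmware's ModelCatalog when a model is loaded. A minimal sketch of how one of them is typically used, assuming llmware is installed and the entries are registered in the default catalog (the exact return shape of inference may vary by version):

    # Minimal sketch: load one of the dragon HF entries above and ask a
    # fact-based question over a short context passage.
    from llmware.models import ModelCatalog

    model = ModelCatalog().load_model("llmware/dragon-yi-6b-v0")

    passage = "The invoice total is $22,500 and payment is due on June 30."
    response = model.inference("What is the invoice total?", add_context=passage)
    print(response)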
@@ -434,7 +434,7 @@
 # deprecated access to dragon-mistral-7b-gguf -> replaced by dragon-mistral-answer-tool
 {"model_name": "llmware/dragon-mistral-7b-gguf", "display_name": "dragon-mistral-7b-gguf",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "fetch": {"module": "llmware.models", "method": "pull_model_from_hf"},
  "validation_files": ["dragon-mistral-7b-q4_k_m.gguf"],
  "temperature": 0.3, "trailing_space": "",
@@ -458,7 +458,7 @@
 # deprecated access to dragon-yi-6b-gguf -> replaced by dragon-yi-answer-tool
 {"model_name": "llmware/dragon-yi-6b-gguf", "display_name": "dragon-yi-6b-gguf",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "temperature": 0.3, "trailing_space": "\n",
  "gguf_file": "dragon-yi-6b-q4_k_m.gguf",
  "gguf_repo": "llmware/dragon-yi-6b-v0",
@@ -469,7 +469,7 @@

 {"model_name": "dragon-yi-answer-tool", "display_name": "dragon-yi-6b-answer-tool",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "temperature": 0.3, "trailing_space": "\n",
  "gguf_file": "dragon-yi.gguf",
  "gguf_repo": "llmware/dragon-yi-answer-tool",
@@ -491,7 +491,7 @@

 {"model_name": "dragon-mistral-answer-tool", "display_name": "dragon-mistral-answer-tool",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": False, "prompt_wrapper": "human_bot",
+ "context_window": 4096, "instruction_following": False, "prompt_wrapper": "human_bot",
  "temperature": 0.3, "trailing_space": "",
  "gguf_file": "dragon-mistral.gguf",
  "gguf_repo": "llmware/dragon-mistral-answer-tool",
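The answer-tool entries above are quantized GGUF builds of the same dragon models and are usually driven through the Prompt interface. A hedged sketch, assuming llmware is installed; the prompt_main signature and the "llm_response" key follow current llmware examples and may differ by version:

    # Minimal sketch: question-answering over supplied text with the
    # dragon-yi answer tool registered above.
    from llmware.prompts import Prompt

    prompter = Prompt().load_model("dragon-yi-answer-tool")

    text = "The lease term is 36 months, beginning on July 1, 2024."
    response = prompter.prompt_main("What is the lease term?", context=text)
    print(response["llm_response"])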
@@ -514,7 +514,7 @@

 {"model_name": "TheBloke/OpenHermes-2.5-Mistral-7B-GGUF", "display_name": "openhermes-mistral-7b-gguf",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": True, "prompt_wrapper": "chat_ml",
+ "context_window": 4096, "instruction_following": True, "prompt_wrapper": "chat_ml",
  "temperature": 0.3, "trailing_space": "",
  "gguf_file": "openhermes-2.5-mistral-7b.Q4_K_M.gguf",
  "gguf_repo": "llmware/bonchon",
@@ -525,7 +525,7 @@

 {"model_name": "TheBloke/zephyr-7B-beta-GGUF", "display_name": "zephyr-7b-gguf",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": True, "prompt_wrapper": "hf_chat",
+ "context_window": 4096, "instruction_following": True, "prompt_wrapper": "hf_chat",
  "temperature": 0.3, "trailing_space": "",
  "gguf_file": "zephyr-7b-beta.Q4_K_M.gguf",
  "gguf_repo": "llmware/bonchon",
@@ -536,7 +536,7 @@

 {"model_name": "TheBloke/Starling-LM-7B-alpha-GGUF", "display_name": "starling-7b-gguf",
  "model_family": "GGUFGenerativeModel", "model_category": "generative_local", "model_location": "llmware_repo",
- "context_window": 2048, "instruction_following": True, "prompt_wrapper": "open_chat",
+ "context_window": 8192, "instruction_following": True, "prompt_wrapper": "open_chat",
  "temperature": 0.3, "trailing_space": "",
  "gguf_file": "starling-lm-7b-alpha.Q4_K_M.gguf",
  "gguf_repo": "llmware/bonchon",
@@ -546,7 +546,7 @@
  "custom_model_files": [], "custom_model_repo": ""},

 {"model_name": "microsoft/Phi-3-mini-4k-instruct-gguf", "display_name": "phi-3-gguf", "model_family": "GGUFGenerativeModel",
- "model_category": "generative_local", "model_location": "llmware_repo", "context_windows": 4096,
+ "model_category": "generative_local", "model_location": "llmware_repo", "context_window": 4096,
  "instruction_following": False, "prompt_wrapper": "phi_3", "temperature": 0.3, "trailing_space": "",
  "gguf_file": "Phi-3-mini-4k-instruct-q4.gguf",
  "gguf_repo": "microsoft/Phi-3-mini-4k-instruct-gguf",
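Since the whole change is about the registered context_window values (plus the "context_windows" typo fix in the phi-3 entry), one way to sanity-check the updated catalog after installing this branch is to read the model cards back. A sketch under the assumption that lookup_model_card returns the raw entry dict and accepts the display_name values used above:

    # Minimal sketch: print the context_window recorded for a few of the
    # updated entries to confirm the new values are picked up.
    from llmware.models import ModelCatalog

    for name in ["dragon-yi-6b", "dragon-mistral-7b", "starling-7b-gguf", "phi-3-gguf"]:
        card = ModelCatalog().lookup_model_card(name)
        print(name, "->", card["context_window"])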