synthetic_efficiency:random=None,model=openai_text-ada-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-ada-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-ada-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-ada-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-ada-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-babbage-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-curie-001,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-002,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1024,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=1536,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=256,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=openai_text-davinci-003,tokenizer=huggingface_gpt2,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1024,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=1536,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=256,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=1 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=16 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=2 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=32 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=4 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=64 | generation |
synthetic_efficiency:random=None,model=together_bloom,tokenizer=bigscience_bloom,num_prompt_tokens=512,num_output_tokens=8 | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=1,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=16,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=2,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=32,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=4,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=64,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1,num_output_tokens=8,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=1,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=16,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=2,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=32,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=4,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=64,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1024,num_output_tokens=8,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=1,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=16,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=2,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=32,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=4,stop=hash | generation |
synthetic_efficiency:random=None,model=together_glm,tokenizer=tsinghua_glm,num_prompt_tokens=1536,num_output_tokens=64,stop=hash | generation |
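The entries above sweep a grid of prompt lengths (1, 256, 512, 1024, 1536 tokens) and output lengths (1, 2, 4, 8, 16, 32, 64 tokens, listed in lexicographic order) for each model/tokenizer pair, with the `together_glm` rows adding a `stop=hash` argument; the excerpt starts and ends mid-grid. A minimal sketch of how such a grid of run-spec descriptions could be enumerated is shown below. This is not HELM's own run-spec builder; the `MODELS`, `NUM_PROMPT_TOKENS`, and `NUM_OUTPUT_TOKENS` names, the ordering, and the tokenizer/stop-sequence pairing are illustrative assumptions.

```python
from itertools import product

# Hypothetical (model, tokenizer, extra-args) triples mirroring the listing above.
# The pairing of each model with its tokenizer and optional stop sequence is an
# assumption for illustration, not taken from HELM's registry.
MODELS = [
    ("openai/text-ada-001", "huggingface/gpt2", ""),
    ("openai/text-babbage-001", "huggingface/gpt2", ""),
    ("openai/text-curie-001", "huggingface/gpt2", ""),
    ("openai/text-davinci-002", "huggingface/gpt2", ""),
    ("openai/text-davinci-003", "huggingface/gpt2", ""),
    ("together/bloom", "bigscience/bloom", ""),
    ("together/glm", "tsinghua/glm", ",stop=hash"),
]
NUM_PROMPT_TOKENS = [1, 256, 512, 1024, 1536]
NUM_OUTPUT_TOKENS = [1, 2, 4, 8, 16, 32, 64]


def run_spec_descriptions():
    """Yield one description string per grid point of the sweep."""
    for (model, tokenizer, extra), n_prompt, n_output in product(
        MODELS, NUM_PROMPT_TOKENS, NUM_OUTPUT_TOKENS
    ):
        # Slashes are flattened to underscores to match the listing's naming style.
        yield (
            "synthetic_efficiency:random=None"
            f",model={model.replace('/', '_')}"
            f",tokenizer={tokenizer.replace('/', '_')}"
            f",num_prompt_tokens={n_prompt}"
            f",num_output_tokens={n_output}"
            f"{extra}"
        )


if __name__ == "__main__":
    for description in run_spec_descriptions():
        print(f"{description} | generation |")
```

The sketch emits the grid in numeric output-token order rather than the lexicographic order of the listing; sorting the generated strings would reproduce the ordering seen above.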