Upload results2/20240723-141344-naturalqs_open-2-tasks.jsonl with huggingface_hub
Browse files
results2/20240723-141344-naturalqs_open-2-tasks.jsonl
ADDED
@@ -0,0 +1,2 @@
+{"task_name": "naturalqs_open", "task_hash": "2e917b2e77e13efc9fb4ab43808bf8bd", "model_hash": "3cde4ce15df7437159bcec453d12b531", "model_config": {"model": "meta-llama/Meta-Llama-3-8B", "revision": null, "trust_remote_code": null, "max_length": 2048, "model_path": null, "model_type": "hf"}, "task_config": {"dataset_path": "google-research-datasets/nq_open", "native_id_field": "index", "primary_metric": "f1", "fewshot_source": null, "split": "validation", "context_kwargs": {"description": null}, "generation_kwargs": {"max_gen_toks": 50, "temperature": 0.0, "do_sample": false, "stop_sequences": ["Question:", "Q:", "\n\n"]}, "limit": 250, "random_subsample_seed": 1234, "metric_kwargs": null, "num_shots": 0, "fewshot_seed": 1234, "dataset_name": null, "task_name": "naturalqs_open", "version": 0, "task_core": "naturalqs_open"}, "compute_config": {"batch_size": "2", "max_batch_size": 32, "output_dir": "/results", "num_recorded_inputs": 3, "save_raw_requests": true, "check_datalake": false}, "processing_time": 254.79988026618958, "current_date": "2024-07-19 23:15:03 UTC", "num_instances": 250, "beaker_info": {"BEAKER_EXPERIMENT_ID": "01J36M2W5BPEPF6FMPBX0FXZCK", "BEAKER_ASSIGNED_GPU_COUNT": "2", "BEAKER_WORKLOAD_ID": "01J36M2W5BPEPF6FMPBX0FXZCK", "BEAKER_TASK_ID": "01J36M2W5H6JKX200QEQY8Z39B", "BEAKER_NODE_ID": "01GY0YRRK4G1QFV8X6X74F8F52", "BEAKER_JOB_ID": "01J36M3065DHBNN2K7WVFGY1T6", "BEAKER_ASSIGNED_CPU_COUNT": "31", "BEAKER_JOB_KIND": "batch", "GIT_REF": "02cb6075619b15ce770de80953a8093e8f0cebd6", "BEAKER_NODE_HOSTNAME": "s2-cirrascale-09.reviz.ai2.in"}, "metrics": {"exact_match": 0.09200000000000005, "f1": 0.15987999999999997, "primary_score": 0.15987999999999997}}
+{"task_name": "drop", "task_hash": "193bdff4bceff956f4329d562f15ada6", "model_hash": "3cde4ce15df7437159bcec453d12b531", "model_config": {"model": "meta-llama/Meta-Llama-3-8B", "revision": null, "trust_remote_code": null, "max_length": 2048, "model_path": null, "model_type": "hf"}, "task_config": {"dataset_path": "EleutherAI/drop", "native_id_field": "query_id", "primary_metric": "f1", "fewshot_source": null, "split": "validation", "context_kwargs": {"passage_prefix": "Passage: "}, "generation_kwargs": {"max_gen_toks": 50, "temperature": 0.0, "do_sample": false, "stop_sequences": ["Passage:", "Question:", "\n\n"]}, "limit": 250, "random_subsample_seed": 1234, "num_shots": 3, "metric_kwargs": null, "fewshot_seed": 1234, "dataset_name": null, "task_name": "drop", "version": 1, "task_core": "drop"}, "compute_config": {"batch_size": "2", "max_batch_size": 32, "output_dir": "/results", "num_recorded_inputs": 3, "save_raw_requests": true, "check_datalake": false}, "processing_time": 328.48001766204834, "current_date": "2024-07-19 23:20:31 UTC", "num_instances": 250, "beaker_info": {"BEAKER_EXPERIMENT_ID": "01J36M2W5BPEPF6FMPBX0FXZCK", "BEAKER_ASSIGNED_GPU_COUNT": "2", "BEAKER_WORKLOAD_ID": "01J36M2W5BPEPF6FMPBX0FXZCK", "BEAKER_TASK_ID": "01J36M2W5H6JKX200QEQY8Z39B", "BEAKER_NODE_ID": "01GY0YRRK4G1QFV8X6X74F8F52", "BEAKER_JOB_ID": "01J36M3065DHBNN2K7WVFGY1T6", "BEAKER_ASSIGNED_CPU_COUNT": "31", "BEAKER_JOB_KIND": "batch", "GIT_REF": "02cb6075619b15ce770de80953a8093e8f0cebd6", "BEAKER_NODE_HOSTNAME": "s2-cirrascale-09.reviz.ai2.in"}, "metrics": {"exact_match": 0.5600000000000004, "f1": 0.5876000000000003, "primary_score": 0.5876000000000003}}