Elron committed on
Commit cf8846d · verified · 1 Parent(s): 6b4ba3d

Upload folder using huggingface_hub

Files changed (3)
  1. inference.py +12 -1
  2. metrics.py +2 -1
  3. version.py +1 -1
inference.py CHANGED
@@ -1461,7 +1461,18 @@ class OllamaInferenceEngine(
                 options=args,
             )
             results.append(response)
-
+        if return_meta_data:
+            return [
+                TextGenerationInferenceOutput(
+                    prediction=element["message"]["content"],
+                    generated_text=element["message"]["content"],
+                    input_tokens=element.get("prompt_eval_count", 0),
+                    output_tokens=element.get("eval_count", 0),
+                    model_name=self.model,
+                    inference_type=self.label,
+                )
+                for element in results
+            ]
         return [element["message"]["content"] for element in results]
 
 
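With this change, the Ollama engine returns full TextGenerationInferenceOutput objects (including the prompt_eval_count/eval_count token counts from the Ollama response) when return_meta_data is set, instead of only the message text. A minimal usage sketch, assuming the engine's usual infer() entry point; the constructor arguments, model name, and dataset variable below are placeholders, not taken from this commit:

# Sketch only: constructor arguments and `dataset` are assumptions.
from unitxt.inference import OllamaInferenceEngine

engine = OllamaInferenceEngine(model="llama3.1", return_meta_data=True)
outputs = engine.infer(dataset)  # dataset: an iterable of prepared instances

for out in outputs:
    # Each element is a TextGenerationInferenceOutput rather than a plain string.
    print(out.prediction, out.input_tokens, out.output_tokens, out.model_name)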
metrics.py CHANGED
@@ -28,7 +28,6 @@ from typing import (
     Union,
 )
 
-import evaluate
 import numpy
 import numpy as np
 import pandas as pd
@@ -82,6 +81,8 @@ warnings.filterwarnings("ignore", category=DegenerateDataWarning)
 
 @retry_connection_with_exponential_backoff(backoff_factor=2)
 def hf_evaluate_load(path: str, *args, **kwargs):
+    import evaluate
+
     if settings.hf_offline_metrics_path is not None:
         path = os.path.join(settings.hf_offline_metrics_path, path)
     return evaluate.load(
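The metrics.py change moves the evaluate import from module scope into hf_evaluate_load, so importing metrics.py no longer requires the evaluate package; it is only needed when a metric is actually loaded. A minimal sketch of the same deferred-import pattern (the helper name here is illustrative, not the project's):

def load_metric_lazily(path: str, *args, **kwargs):
    # Deferred import: keeps module import cheap and avoids requiring
    # the `evaluate` package unless this function is actually called.
    import evaluate

    return evaluate.load(path, *args, **kwargs)

# Example: accuracy = load_metric_lazily("accuracy")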
version.py CHANGED
@@ -1 +1 @@
-version = "1.26.7"
+version = "1.26.8"