ajalisatgi committed on
Commit fd9d58f · verified · 1 Parent(s): 2e77d5f

Update app.py

Files changed (1)
  1. app.py +4 -12
app.py CHANGED
@@ -6,7 +6,6 @@ import time
 from langchain_community.embeddings import HuggingFaceEmbeddings
 import torch
 import psutil
-import GPUtil

 # Set up logging
 logging.basicConfig(level=logging.INFO)
@@ -23,20 +22,17 @@ embedding_model.client.to(device)

 # Load datasets
 datasets = {}
-dataset_names = ['covidqa', 'hotpotqa', 'pubmedqa']  # Starting with key datasets
+dataset_names = ['covidqa', 'hotpotqa', 'pubmedqa']

 for name in dataset_names:
     datasets[name] = load_dataset("rungalileo/ragbench", name, split='train')
     logger.info(f"Loaded {name}")

 def get_system_metrics():
-    metrics = {
+    return {
         'cpu_percent': psutil.cpu_percent(),
-        'memory_percent': psutil.virtual_memory().percent,
-        'gpu_util': GPUtil.getGPUs()[0].load * 100 if torch.cuda.is_available() else 0,
-        'gpu_memory': GPUtil.getGPUs()[0].memoryUtil * 100 if torch.cuda.is_available() else 0
+        'memory_percent': psutil.virtual_memory().percent
     }
-    return metrics

 def process_query(query, dataset_choice="all"):
     start_time = time.time()
@@ -62,7 +58,6 @@ def process_query(query, dataset_choice="all"):
         temperature=0.7,
     )

-    # Get performance metrics
     metrics = get_system_metrics()
     metrics['processing_time'] = time.time() - start_time

@@ -70,16 +65,13 @@ def process_query(query, dataset_choice="all"):
     Processing Time: {metrics['processing_time']:.2f}s
     CPU Usage: {metrics['cpu_percent']}%
     Memory Usage: {metrics['memory_percent']}%
-    GPU Utilization: {metrics['gpu_util']:.1f}%
-    GPU Memory: {metrics['gpu_memory']:.1f}%
     """

     return response.choices[0].message.content.strip(), metrics_display

     except Exception as e:
-        return str(e), "Metrics collection in progress"
+        return str(e), "Performance metrics available on next query"

-# Create Gradio interface
 demo = gr.Interface(
     fn=process_query,
     inputs=[
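
For reference, the metrics path as it reads after this commit, reconstructed from the hunks above. The surrounding process_query code is only partly visible in the diff, so the f-string framing of metrics_display shown here is an assumption:

import time
import psutil

def get_system_metrics():
    # CPU and RAM only; the GPUtil-based GPU fields are gone after this commit.
    return {
        'cpu_percent': psutil.cpu_percent(),
        'memory_percent': psutil.virtual_memory().percent
    }

# Usage mirroring process_query (only the body lines of the f-string appear in the diff):
start_time = time.time()
metrics = get_system_metrics()
metrics['processing_time'] = time.time() - start_time
metrics_display = f"""
Processing Time: {metrics['processing_time']:.2f}s
CPU Usage: {metrics['cpu_percent']}%
Memory Usage: {metrics['memory_percent']}%
"""
print(metrics_display)

Returning the dict directly removes the temporary metrics variable and the separate return statement from the old version of the helper.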
 
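The commit also drops the GPUtil dependency outright. If GPU figures are wanted again later, a minimal GPUtil-free sketch using only torch could look like the following; this is a suggestion rather than part of the commit, and get_gpu_metrics is a hypothetical helper name:

import torch

def get_gpu_metrics():
    # Hypothetical helper: report GPU memory use through torch alone,
    # so the removed GPUtil dependency stays out of requirements.
    if not torch.cuda.is_available():
        return {'gpu_memory_percent': 0.0}
    used = torch.cuda.memory_allocated(0)  # bytes allocated by this process
    total = torch.cuda.get_device_properties(0).total_memory
    return {'gpu_memory_percent': 100.0 * used / total}

Note that torch.cuda.memory_allocated only counts tensors allocated by the current PyTorch process, so it reports less than the device-wide figure GPUtil's memoryUtil gave.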