SarowarSaurav committed · verified
Commit e3f8969 · 1 Parent(s): 1ddb8b9

Update app.py

Files changed (1)
  1. app.py +43 -12
app.py CHANGED
@@ -9,6 +9,9 @@ from azure.ai.inference.models import (
     ImageDetailLevel,
 )
 from azure.core.credentials import AzureKeyCredential
+from gtts import gTTS
+from googletrans import Translator
+import os
 
 # Azure API credentials
 token = "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"
@@ -21,10 +24,12 @@ client = ChatCompletionsClient(
     credential=AzureKeyCredential(token),
 )
 
-# Define the function to handle the image and get predictions
+# Translator instance
+translator = Translator()
+
+# Analyze leaf image
 def analyze_leaf_disease(image_path, leaf_type):
     try:
-        # Prepare and send the request to the Azure API
         response = client.complete(
             messages=[
                 SystemMessage(
@@ -45,26 +50,39 @@ def analyze_leaf_disease(image_path, leaf_type):
             ],
             model=model_name,
         )
-
-        # Extract and return the response content
         return response.choices[0].message.content
-
     except Exception as e:
         return f"An error occurred: {e}"
 
-# Define the Gradio interface
+# Translate to Bangla
+def translate_to_bangla(text):
+    try:
+        translated = translator.translate(text, dest="bn")
+        return translated.text
+    except Exception as e:
+        return f"Translation error: {e}"
+
+# Text-to-Speech
+def text_to_speech(text):
+    try:
+        tts = gTTS(text)
+        audio_file = "output.mp3"
+        tts.save(audio_file)
+        return audio_file
+    except Exception as e:
+        return f"TTS error: {e}"
+
+# Main handler
 def handle_proceed(image_path, leaf_type):
-    # Display detecting status
     detecting_status = "Detecting..."
     result = analyze_leaf_disease(image_path, leaf_type)
-    # Clear detecting status after processing
     return "", result
 
 with gr.Blocks() as interface:
     with gr.Row():
         gr.Markdown("""
-        # Leaf Disease Detector
-        Upload a leaf image, select the leaf type, and let the AI analyze the disease.
+        # 🌿 Leaf Disease Detector
+        Upload a leaf image, select the leaf type, and let AI analyze it. You can also listen to or translate the result.
         """)
 
     with gr.Row():
@@ -77,10 +95,23 @@ with gr.Blocks() as interface:
 
     with gr.Row():
         detecting_label = gr.Label("Detecting...", visible=False)
-        output_box = gr.Textbox(label="Results", placeholder="Results will appear here.")
+        output_box = gr.Textbox(label="Results", placeholder="Results will appear here.", lines=10)
+
+    with gr.Row():
+        tts_button = gr.Button("🔊 Read Aloud")
+        tts_audio = gr.Audio(label="Audio", autoplay=True)
 
-    # Update the detecting_label and result in outputs
+        translate_button = gr.Button("🌐 Translate to Bangla")
+        translated_output = gr.Textbox(label="বাংলা অনুবাদ", placeholder="Bangla translation will appear here.", lines=10)
+
+    # Main prediction
     proceed_button.click(handle_proceed, inputs=[image_input, leaf_type], outputs=[detecting_label, output_box])
 
+    # Text-to-speech
+    tts_button.click(text_to_speech, inputs=[output_box], outputs=[tts_audio])
+
+    # Translate
+    translate_button.click(translate_to_bangla, inputs=[output_box], outputs=[translated_output])
+
 if __name__ == "__main__":
     interface.launch()
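
One note on the credentials this diff leaves in place: the inference token is still committed as a string literal in app.py, even though the new version already adds `import os`. A minimal sketch of reading it from the environment instead, assuming a secret is exposed to the process under a hypothetical name such as GITHUB_TOKEN:

import os

# Hypothetical alternative to the hardcoded literal: pull the token from an
# environment variable (e.g. a Space secret) and fail fast if it is missing.
token = os.environ.get("GITHUB_TOKEN")
if not token:
    raise RuntimeError("GITHUB_TOKEN environment variable is not set")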
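
The new text_to_speech helper calls gTTS(text) with its default language, which is English, while the commit also adds a Bangla translation box. A language-aware variant could be pointed at the translated text; a sketch under that assumption (gTTS does accept a lang parameter, but the button wiring below is hypothetical and not part of this commit):

from gtts import gTTS

def text_to_speech_lang(text, lang="en"):
    # Same shape as the committed text_to_speech, but with an explicit
    # language code, e.g. "bn" for the Bangla translation.
    try:
        tts = gTTS(text, lang=lang)
        audio_file = "output.mp3"
        tts.save(audio_file)
        return audio_file
    except Exception as e:
        return f"TTS error: {e}"

# Hypothetical wiring for reading the Bangla textbox aloud:
# bn_tts_button.click(lambda t: text_to_speech_lang(t, "bn"),
#                     inputs=[translated_output], outputs=[tts_audio])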