Skip to content

Commit d741fb5

Browse files
- Create Separate Enum For Audio Models. - Update Audio Example To MVVM.
1 parent 3ab708d commit d741fb5

File tree

4 files changed

+102
-46
lines changed

4 files changed

+102
-46
lines changed

Example/Example.xcodeproj/project.pbxproj

+4
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
777D472B2A53EA9D0050C0E7 /* ChatGPTAPIManager in Frameworks */ = {isa = PBXBuildFile; productRef = 777D472A2A53EA9D0050C0E7 /* ChatGPTAPIManager */; };
1111
777D472F2A53F7560050C0E7 /* english_song.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 777D472D2A53F7550050C0E7 /* english_song.m4a */; };
1212
777D47302A53F7560050C0E7 /* translation_file.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 777D472E2A53F7550050C0E7 /* translation_file.m4a */; };
13+
777D47322A5419150050C0E7 /* AudioViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 777D47312A5419150050C0E7 /* AudioViewModel.swift */; };
1314
A8B85FCD2A431318005FBEDE /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8B85FCC2A431318005FBEDE /* AppDelegate.swift */; };
1415
A8B85FCF2A431318005FBEDE /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8B85FCE2A431318005FBEDE /* SceneDelegate.swift */; };
1516
A8B85FD12A431318005FBEDE /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8B85FD02A431318005FBEDE /* ViewController.swift */; };
@@ -55,6 +56,7 @@
5556
777D47282A53EA8F0050C0E7 /* chatgpt-ios-sdk */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = "chatgpt-ios-sdk"; path = ..; sourceTree = "<group>"; };
5657
777D472D2A53F7550050C0E7 /* english_song.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; path = english_song.m4a; sourceTree = "<group>"; };
5758
777D472E2A53F7550050C0E7 /* translation_file.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; path = translation_file.m4a; sourceTree = "<group>"; };
59+
777D47312A5419150050C0E7 /* AudioViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioViewModel.swift; sourceTree = "<group>"; };
5860
A8B85FC92A431318005FBEDE /* Example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Example.app; sourceTree = BUILT_PRODUCTS_DIR; };
5961
A8B85FCC2A431318005FBEDE /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
6062
A8B85FCE2A431318005FBEDE /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; };
@@ -195,6 +197,7 @@
195197
A8B8602C2A4413F2005FBEDE /* ChatViewModel.swift */,
196198
A8B8602E2A441409005FBEDE /* ImageViewModel.swift */,
197199
A8B860332A44431C005FBEDE /* TextGenerationViewModel.swift */,
200+
777D47312A5419150050C0E7 /* AudioViewModel.swift */,
198201
);
199202
path = ViewModles;
200203
sourceTree = "<group>";
@@ -412,6 +415,7 @@
412415
A8B85FD12A431318005FBEDE /* ViewController.swift in Sources */,
413416
A8B8600E2A431AA6005FBEDE /* ChatViewController.swift in Sources */,
414417
A8B860252A440C52005FBEDE /* MessageModel.swift in Sources */,
418+
777D47322A5419150050C0E7 /* AudioViewModel.swift in Sources */,
415419
A8B860102A431AA6005FBEDE /* EZLoadingActivity.swift in Sources */,
416420
A8B85FCD2A431318005FBEDE /* AppDelegate.swift in Sources */,
417421
A8B85FCF2A431318005FBEDE /* SceneDelegate.swift in Sources */,
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
//
2+
// AudioViewModel.swift
3+
// Example
4+
//
5+
// Created by Najam us Saqib on 7/4/23.
6+
//
7+
8+
import Foundation
9+
import ChatGPTAPIManager
10+
11+
/// Abstraction over the audio (speech-to-text) use cases so the view
/// controller can depend on a protocol rather than the concrete view model.
///
/// NOTE(review): the argument label `completioin` is a typo for `completion`.
/// Renaming it would be source-breaking for any caller that spells the label
/// out (current callers use trailing-closure syntax), so it is preserved
/// here; fix it in a coordinated rename of protocol + conformers.
protocol AudioViewModelProtocol {
    /// Transcribes the audio file at `url`; delivers the text or an error.
    func audioTranscriptions(url: URL, completioin: @escaping (Result<String, Error>) -> Void)
    /// Translates the audio file at `url`; delivers the text or an error.
    func audioTranslation(url: URL, completioin: @escaping (Result<String, Error>) -> Void)
}

/// View model for the audio example screen (MVVM).
///
/// Each operation shows a blocking loading HUD, forwards the request to
/// `ChatGPTAPIManager`, and hides the HUD once the result arrives.
class AudioViewModel: AudioViewModelProtocol {

    func audioTranscriptions(url: URL, completioin: @escaping (Result<String, Error>) -> Void) {
        EZLoadingActivity.show("Loading...", disableUI: true)
        ChatGPTAPIManager.shared.audioTranscriptionRequest(
            fileUrl: url,
            prompt: "Translate this audio",
            language: "en",
            model: .whisper1,
            endPoint: .transcriptions,
            completion: { result in
                self.deliver(result, to: completioin)
            }
        )
    }

    func audioTranslation(url: URL, completioin: @escaping (Result<String, Error>) -> Void) {
        EZLoadingActivity.show("Loading...", disableUI: true)
        ChatGPTAPIManager.shared.audioTranslationRequest(
            fileUrl: url,
            temperature: 0.8,
            model: .whisper1,
            endPoint: .translations,
            completion: { result in
                self.deliver(result, to: completioin)
            }
        )
    }

    /// Shared tail of both requests: log the outcome, forward it to the
    /// caller, then dismiss the HUD with success/failure styling.
    /// Order (completion before hide) matches the original implementation.
    ///
    /// NOTE(review): this runs on whatever queue the API manager calls back
    /// on; `EZLoadingActivity.hide` touches UI — confirm it is main-thread
    /// safe or dispatch to the main queue here.
    private func deliver(_ result: Result<String, Error>,
                         to completion: (Result<String, Error>) -> Void) {
        switch result {
        case .success(let text):
            print(text)
            completion(.success(text))
            EZLoadingActivity.hide(true, animated: true)
        case .failure(let error):
            print(error)
            completion(.failure(error))
            EZLoadingActivity.hide(false, animated: true)
        }
    }
}

Example/Example/Views/AudioTranslationAndTranscript/AudioViewController.swift

+33-39
Original file line numberDiff line numberDiff line change
@@ -12,55 +12,49 @@ class AudioViewController: UIViewController {
1212

1313
@IBOutlet weak var textView: UITextView!
1414

15+
// MARK: - Variables
16+
var vm = AudioViewModel()
17+
1518
override func viewDidLoad() {
1619
super.viewDidLoad()
17-
1820
}
21+
1922
/// Sends the bundled sample clip to the audio-translation endpoint and
/// shows the resulting text (or the error description) in the text view.
@IBAction func audioTranslation(sender: UIButton) {
    // Sample file shipped in the app bundle; silently no-ops if absent
    // (same behavior as before — consider surfacing an error to the user).
    guard let fileURL = Bundle.main.url(forResource: "translation_file", withExtension: "m4a") else { return }
    // [weak self]: the completion escapes for the duration of a network
    // call; a strong capture would keep a dismissed controller alive.
    vm.audioTranslation(url: fileURL) { [weak self] result in
        // UI mutation must happen on the main queue; the view model may
        // call back from a background queue.
        DispatchQueue.main.async {
            guard let self = self else { return }
            switch result {
            case .success(let audioText):
                self.textView.text = audioText
            case .failure(let error):
                self.textView.text = error.localizedDescription
            }
        }
    }
}
40+
2441
/// Sends the bundled sample song to the audio-transcription endpoint and
/// shows the resulting text (or the error description) in the text view.
@IBAction func audioTranscription(sender: UIButton) {
    // Sample file shipped in the app bundle; silently no-ops if absent
    // (same behavior as before — consider surfacing an error to the user).
    guard let fileURL = Bundle.main.url(forResource: "english_song", withExtension: "m4a") else { return }
    // [weak self]: the completion escapes for the duration of a network
    // call; a strong capture would keep a dismissed controller alive.
    vm.audioTranscriptions(url: fileURL) { [weak self] result in
        // UI mutation must happen on the main queue; the view model may
        // call back from a background queue.
        DispatchQueue.main.async {
            guard let self = self else { return }
            switch result {
            case .success(let audioText):
                self.textView.text = audioText
            case .failure(let error):
                self.textView.text = error.localizedDescription
            }
        }
    }
}
59+
6660
}

Sources/ChatGPTAPIManager/ChatGPTAPIManager.swift

+9-7
Original file line numberDiff line numberDiff line change
@@ -112,9 +112,11 @@ public enum ChatGPTModels: String {
112112
// Edits
113113
case textDavinci001 = "text-davinci-001"
114114
case codeDavinciEdit001 = "code-davinci-edit-001"
115-
116-
// Transcriptions / Translations
117-
case whisper1 = "whisper-1"
115+
}
116+
117+
// MARK: - AudioGPTModels Enum
/// Models accepted by the audio endpoints (transcription / translation),
/// kept separate from `ChatGPTModels` since only Whisper applies here.
public enum AudioGPTModels: String {
    /// OpenAI Whisper v1 speech-to-text model.
    case whisper1 = "whisper-1"
}
119121

120122
/// Enum representing different ImageSizes supported by imagegeneration API.
@@ -191,7 +193,7 @@ final public class ChatGPTAPIManager {
191193
/// Requests an audio transcription for the file at `fileUrl`.
///
/// All parameters are forwarded unchanged to the private
/// `audioTranscription` implementation.
/// - Parameters:
///   - fileUrl: Local URL of the audio file to transcribe.
///   - prompt: Optional prompt forwarded with the request.
///   - temperature: Optional sampling temperature forwarded with the request.
///   - language: Optional language hint forwarded with the request.
///   - model: The audio model to use; defaults to `.whisper1`.
///   - endPoint: The endpoint URL for the API request.
///   - completion: Called with the transcribed text on success, or an
///     `Error` on failure.
public func audioTranscriptionRequest(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, model: AudioGPTModels = .whisper1, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
    audioTranscription(fileUrl: fileUrl, prompt: prompt, temperature: temperature, language: language, model: model, endPoint: endPoint, completion: completion)
}
197199
/// Requests an audio translation for the file at `fileUrl`.
///
/// All parameters are forwarded unchanged to the private
/// `audioTranslation` implementation.
/// - Parameters:
///   - fileUrl: Local URL of the audio file to translate.
///   - prompt: Optional prompt forwarded with the request.
///   - temperature: Optional sampling temperature forwarded with the request.
///   - model: The audio model to use; defaults to `.whisper1`.
///   - endPoint: The endpoint URL for the API request.
///   - completion: Called with the translated text on success, or an
///     `Error` on failure.
public func audioTranslationRequest(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, model: AudioGPTModels = .whisper1, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
    audioTranslation(fileUrl: fileUrl, prompt: prompt, temperature: temperature, model: model, endPoint: endPoint, completion: completion)
}
209211

@@ -338,7 +340,7 @@ final public class ChatGPTAPIManager {
338340

339341
}
340342

341-
private func audioTranscription(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, model: ChatGPTModels, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
343+
private func audioTranscription(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, model: AudioGPTModels, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
342344

343345
// Define the key-value pairs
344346
var parameters: [String: Any] = [
@@ -385,7 +387,7 @@ final public class ChatGPTAPIManager {
385387

386388
}
387389

388-
private func audioTranslation(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, model: ChatGPTModels, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
390+
private func audioTranslation(fileUrl: URL, prompt: String? = nil, temperature: Double? = nil, model: AudioGPTModels, endPoint: APPURL, completion: @escaping (Result<String, Error>) -> Void) {
389391

390392
var parameters: [String: Any] = [
391393
"model": model.rawValue

0 commit comments

Comments
 (0)