jhj0517 committed
Commit 711929b
Parent: 92376eb

Update paths for UVR

Files changed (1): app.py (+11 -3)
app.py CHANGED
@@ -4,9 +4,11 @@ import gradio as gr
 import yaml
 
 from modules.utils.paths import (FASTER_WHISPER_MODELS_DIR, DIARIZATION_MODELS_DIR, OUTPUT_DIR, WHISPER_MODELS_DIR,
-                                 INSANELY_FAST_WHISPER_MODELS_DIR, NLLB_MODELS_DIR, DEFAULT_PARAMETERS_CONFIG_PATH)
+                                 INSANELY_FAST_WHISPER_MODELS_DIR, NLLB_MODELS_DIR, DEFAULT_PARAMETERS_CONFIG_PATH,
+                                 UVR_MODELS_DIR)
 from modules.utils.files_manager import load_yaml
 from modules.whisper.whisper_factory import WhisperFactory
+from modules.uvr.music_separator import MusicSeparator
 from modules.whisper.faster_whisper_inference import FasterWhisperInference
 from modules.whisper.insanely_fast_whisper_inference import InsanelyFastWhisperInference
 from modules.translation.nllb_inference import NLLBInference
@@ -27,8 +29,6 @@ class App:
             insanely_fast_whisper_model_dir=self.args.insanely_fast_whisper_model_dir,
             output_dir=self.args.output_dir,
         )
-        print(f"Use \"{self.args.whisper_type}\" implementation")
-        print(f"Device \"{self.whisper_inf.device}\" is detected")
         self.nllb_inf = NLLBInference(
             model_dir=self.args.nllb_model_dir,
             output_dir=os.path.join(self.args.output_dir, "translations")
@@ -36,7 +36,13 @@ class App:
         self.deepl_api = DeepLAPI(
             output_dir=os.path.join(self.args.output_dir, "translations")
         )
+        self.music_separator = MusicSeparator(
+            model_dir=self.args.uvr_model_dir,
+            output_dir=os.path.join(self.args.output_dir, "UVR")
+        )
         self.default_params = load_yaml(DEFAULT_PARAMETERS_CONFIG_PATH)
+        print(f"Use \"{self.args.whisper_type}\" implementation")
+        print(f"Device \"{self.whisper_inf.device}\" is detected")
 
     def create_whisper_parameters(self):
         whisper_params = self.default_params["whisper"]
@@ -383,6 +389,8 @@ parser.add_argument('--diarization_model_dir', type=str, default=DIARIZATION_MOD
                     help='Directory path of the diarization model')
 parser.add_argument('--nllb_model_dir', type=str, default=NLLB_MODELS_DIR,
                     help='Directory path of the Facebook NLLB model')
+parser.add_argument('--uvr_model_dir', type=str, default=UVR_MODELS_DIR,
+                    help='Directory path of the UVR model')
 parser.add_argument('--output_dir', type=str, default=OUTPUT_DIR, help='Directory path of the outputs')
 _args = parser.parse_args()
 
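The new --uvr_model_dir flag mirrors the existing model-directory options. Below is a minimal, standalone sketch (not part of the commit) of how the added argument behaves; the UVR_MODELS_DIR and OUTPUT_DIR values here are illustrative placeholders for the constants that app.py imports from modules.utils.paths:

    import argparse
    import os

    # Illustrative placeholders; the real values are defined in
    # modules.utils.paths and are not shown in this commit.
    UVR_MODELS_DIR = os.path.join("models", "UVR")
    OUTPUT_DIR = "outputs"

    parser = argparse.ArgumentParser()
    parser.add_argument('--uvr_model_dir', type=str, default=UVR_MODELS_DIR,
                        help='Directory path of the UVR model')
    parser.add_argument('--output_dir', type=str, default=OUTPUT_DIR,
                        help='Directory path of the outputs')
    args = parser.parse_args()

    # As in the diff, UVR separation output is written under <output_dir>/UVR.
    print("UVR models:", args.uvr_model_dir)
    print("UVR output:", os.path.join(args.output_dir, "UVR"))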