abhishek HF staff committed on
Commit
99512df
1 Parent(s): 63836e1
Files changed (1)
  1. app.py +8 -6
app.py CHANGED
@@ -179,13 +179,15 @@ def start_training(
     else:
         config["config"]["process"][0]["train"]["disable_sampling"] = True
     # Save the updated config
-    config_path = f"config/{slugged_lora_name}.yaml"
+    # generate a random name for the config
+    random_config_name = str(uuid.uuid4())
+    config_path = f"/tmp/{random_config_name}-{slugged_lora_name}.yaml"
     with open(config_path, "w") as f:
         yaml.dump(config, f)
     if is_spaces:
         print("Started training with spacerunner...")
-        # copy config to dataset_folder
-        shutil.copy(config_path, dataset_folder)
+        # copy config to dataset_folder as config.yaml
+        shutil.copy(config_path, dataset_folder + "/config.yaml")
         # get location of this script
         script_location = os.path.dirname(os.path.abspath(__file__))
         # copy script.py from current directory to dataset_folder
@@ -194,12 +196,12 @@ def start_training(
         shutil.copy(script_location + "/requirements.autotrain", dataset_folder + "/requirements.txt")
         # command to run autotrain spacerunner
         cmd = f"autotrain spacerunner --project-name {slugged_lora_name} --script-path {dataset_folder}"
-        cmd += f" --username {profile.name} --token {oauth_token} --backend spaces-l4x1"
+        cmd += f" --username {profile.username} --token {oauth_token} --backend spaces-l4x1"
         outcome = subprocess.run(cmd)
         if outcome.returncode == 0:
             return f"""# Your training has started.
-## - Training Status: <a href='https://huggingface.co/spaces/{profile.name}/autotrain-{slugged_lora_name}?logs=container'>{profile.name}/autotrain-{slugged_lora_name}</a> <small>(in the logs tab)</small>
-## - Model page: <a href='https://huggingface.co/{profile.name}/{slugged_lora_name}'>{profile.name}/{slugged_lora_name}</a> <small>(will be available when training finishes)</small>"""
+## - Training Status: <a href='https://huggingface.co/spaces/{profile.username}/autotrain-{slugged_lora_name}?logs=container'>{profile.username}/autotrain-{slugged_lora_name}</a> <small>(in the logs tab)</small>
+## - Model page: <a href='https://huggingface.co/{profile.username}/{slugged_lora_name}'>{profile.username}/{slugged_lora_name}</a> <small>(will be available when training finishes)</small>"""
         else:
             print("Error: ", outcome.stderr)
             raise gr.Error("Something went wrong. Make sure the name of your LoRA is unique and try again")
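
For reference, a minimal sketch of the patched flow after this commit, assuming the variables `config`, `slugged_lora_name`, `dataset_folder`, `profile`, and `oauth_token` are in scope exactly as `start_training` defines them elsewhere in app.py. The helper name `launch_spacerunner` is hypothetical, and `cmd.split()` is an assumption so the sketch runs without `shell=True`; the app passes the command string to `subprocess.run` directly.

```python
# Hypothetical condensed sketch of the behaviour after this commit, not the full app.py.
import shutil
import subprocess
import uuid

import yaml


def launch_spacerunner(config, slugged_lora_name, dataset_folder, profile, oauth_token):
    # write the config under a collision-free random name in /tmp instead of config/
    random_config_name = str(uuid.uuid4())
    config_path = f"/tmp/{random_config_name}-{slugged_lora_name}.yaml"
    with open(config_path, "w") as f:
        yaml.dump(config, f)

    # copy it into the dataset folder under the fixed name config.yaml
    shutil.copy(config_path, dataset_folder + "/config.yaml")

    # build the spacerunner command with the OAuth profile's username
    # (profile.username, not profile.name, as of this commit)
    cmd = f"autotrain spacerunner --project-name {slugged_lora_name} --script-path {dataset_folder}"
    cmd += f" --username {profile.username} --token {oauth_token} --backend spaces-l4x1"

    # cmd.split() is an assumption for this sketch; the app calls subprocess.run(cmd)
    return subprocess.run(cmd.split())
```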