fantasyfish committed
Commit 239a35e
1 Parent(s): 356b3b9

update two server.py files

Files changed (2)
  1. inference_server/server.py +76 -16
  2. train_server/server.py +34 -17
inference_server/server.py CHANGED
@@ -1,15 +1,31 @@
  '''
  sudo docker run --gpus all --runtime=nvidia --rm \
- -v /home/fantasyfish/Desktop/dotdemo/third_party:/third_party \
- -v /home/fantasyfish/Desktop/dotdemo/examples:/inputDir \
- -v /home/fantasyfish/Desktop/dotdemo/logs:/logs \
- -v /home/fantasyfish/Desktop/dotdemo/results:/outputDir \
- -v /home/fantasyfish/Desktop/dotdemo/inference_server:/app \
- -p 8081:8081 \
- -it rvc:v0
- curl -X GET http://localhost:8081/inference \
+ -v /home/ubuntu/dotdemo/third_party:/third_party \
+ -v /home/ubuntu/dotdemo-dev:/dotdemo-dev \
+ -v /home/ubuntu/dot-demo-assets/ml-logs:/logs \
+ -v /home/ubuntu/audio-inference-output:/outputDir \
+ -v /home/ubuntu/dotdemo/inference_server:/app \
+ --network="host" \
+ -it fantasyfish677/rvc:v0 /bin/bash
+ pip3 install flask_cors
+ python3 /app/server.py 2>&1 | tee /logs/inference_server.log
+
+ export FLASK_APP=server
+ export FLASK_DEBUG=true
+ pip3 install gunicorn
+ gunicorn -b 0.0.0.0:8080 -w 4 --timeout=600 server:app
+
+ curl -X POST http://3.16.130.199:8081/ping
+
+ curl -X POST http://3.16.130.199:8081/available-models
+
+ curl -X POST http://3.16.130.199:8081/inference \
+ -H 'Content-Type: application/json' \
+ -d '{"expName":"CashMoney","audioFilePath":"radwimps.mp3", "pitchShift": 0, "saveFilePath":"radwimps_CashMoney.wav"}'
+
+ curl -X POST http://3.16.130.199:8081/delete \
  -H 'Content-Type: application/json' \
- -d '{"expName":"drake-20","audioFileName":"radwimps.mp3", "pitchShift": 0, "saveFileName":"radwimps_drake-20.wav"}'
+ -d '{"modelName":"drake-100"}'
  '''
  import json
  import os
@@ -18,16 +34,22 @@ from logging import exception
  import time
  from server_utils import model_inference_single
  from glob import glob
+ from flask_cors import CORS, cross_origin
+ import shutil

  print("import successful!")

- app = Flask(__name__)
+ app = Flask("inference server")
+ cors = CORS(app)
+ app.config['CORS_HEADERS'] = 'Content-Type'

- @app.route("/ping")
+ @app.route("/ping", methods=['GET', 'POST'])
+ @cross_origin()
  def healthcheck():
      return json.dumps({"code": 200, "message": "responding"}).encode('utf-8')

- @app.route("/inference", methods=['GET'])
+ @app.route("/inference", methods=['GET', 'POST'])
+ @cross_origin()
  def inference():
      if request.headers['Content-Type'] != 'application/json':
          exception("Header error")
@@ -35,12 +57,12 @@ def inference():
      try:
          content = request.get_json()
          exp_name = content['expName']
-         audio_file_name = content['audioFileName']
+         audio_file_path = content['audioFilePath']
          pitch_shift = content['pitchShift']
-         save_file_name = content['saveFileName']
+         save_file_path = content['saveFilePath']

          model_path = exp_name + '.pth'
-         audio_path = os.path.join('/inputDir', audio_file_name)
+         audio_path = os.path.join('/dotdemo-dev', audio_file_path)
          if not os.path.exists('/third_party/RVC/weights/{}'.format(model_path)):
              exception("Model doesn't exist")
              return json.dumps({"message":"Model doesn't exist"}), 404
@@ -48,7 +70,10 @@ def inference():
              exception("Audio file doesn't exist")
              return json.dumps({"message":"Audio file doesn't exist"}), 404

-         save_path = os.path.join('/outputDir', save_file_name)
+         save_path = os.path.join('/dotdemo-dev', save_file_path)
+         save_dir = os.path.dirname(save_path)
+         os.makedirs(save_dir, exist_ok=True)
+
          error_log_path = os.path.join("/logs{}.log".format(exp_name))
          index_path_list = glob("/third_party/RVC/logs/{}/added_IVF*_Flat_nprobe_1_v1.index".format(exp_name))
          index_path = index_path_list[0] if len(index_path_list) > 0 else ""
@@ -62,5 +87,40 @@ def inference():
          exception("Training process failed")
          return json.dumps({"message":"Inference process failed due to {}".format(e)}), 500

+ @app.route("/available-models", methods=['GET', 'POST'])
+ @cross_origin()
+ def get_available_models():
+     model_dir = '/third_party/RVC/weights'
+     model_list = os.listdir(model_dir)
+     model_list = [model_name[:-4] for model_name in model_list]
+     return json.dumps({"message": ','.join(model_list)}), 200
+
+ @app.route("/delete", methods=['GET', 'POST'])
+ @cross_origin()
+ def delete_model():
+     if request.headers['Content-Type'] != 'application/json':
+         exception("Header error")
+         return json.dumps({"message":"Header error"}), 500
+     try:
+         content = request.get_json()
+         model_name = content['modelName']
+         if model_name == "mute":
+             return json.dumps({"message": "Sorry, it's not allowed to delete mute directory"}), 500
+
+         model_dir = '/third_party/RVC/weights'
+         model_path = '{}/{}.pth'.format(model_dir, model_name)
+         if not os.path.exists(model_path):
+             exception("Model doesn't exist")
+             return json.dumps({"message":"Model doesn't exist"}), 404
+
+         os.remove(model_path)
+         log_dir = '/third_party/RVC/logs/' + model_name
+         if os.path.exists(log_dir):
+             shutil.rmtree(model_path)
+         return json.dumps({"message": 'Model {} has been deleted'.format(model_name)}), 200
+     except Exception as e:
+         exception("Training process failed")
+         return json.dumps({"message":"Inference process failed due to {}".format(e)}), 500
+
  if __name__ == "__main__":
      app.run(host="0.0.0.0", port=8081, debug=True)
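
The docstring in the new version of inference_server/server.py documents its endpoints with curl. As a rough, hypothetical illustration (not part of this commit), the same calls could be issued from Python; the base URL is a placeholder and the requests package is an assumed extra dependency:

# Hypothetical client for the inference server, mirroring the curl examples above.
import requests

BASE_URL = "http://localhost:8081"  # placeholder; the docstring uses the EC2 host 3.16.130.199

# Health check (the route now accepts GET and POST)
print(requests.post(BASE_URL + "/ping").json())

# List voice models; the server returns a comma-separated list in "message"
print(requests.post(BASE_URL + "/available-models").json())

# Run inference on an audio file; paths are resolved under the /dotdemo-dev mount
payload = {
    "expName": "CashMoney",
    "audioFilePath": "radwimps.mp3",
    "pitchShift": 0,
    "saveFilePath": "radwimps_CashMoney.wav",
}
resp = requests.post(BASE_URL + "/inference", json=payload)
print(resp.status_code, resp.json())

# Delete a model by name
resp = requests.post(BASE_URL + "/delete", json={"modelName": "drake-100"})
print(resp.status_code, resp.json())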
train_server/server.py CHANGED
@@ -1,18 +1,29 @@
  '''
  sudo docker run --gpus all --runtime=nvidia --rm \
- -v /home/fantasyfish/Desktop/dotdemo/third_party:/third_party \
- -v /home/fantasyfish/Desktop/dotdemo/examples:/inputDir \
- -v /home/fantasyfish/Desktop/dotdemo/logs:/logs \
- -v /home/fantasyfish/Desktop/dotdemo/train_server:/app \
- -p 8080:8080 \
- -it rvc:v0
- curl -X GET http://localhost:8080/train \
- -H 'Content-Type: application/json' \
- -d '{"expName":"drake-20","trainsetDir":"drake"}'
+ -v /home/ubuntu/dotdemo/third_party:/third_party \
+ -v /home/ubuntu/dotdemo-dev:/dotdemo-dev \
+ -v /home/ubuntu/dot-demo-assets/ml-logs:/logs \
+ -v /home/ubuntu/dotdemo/train_server:/app \
+ --network="host" \
+ --shm-size 1G \
+ -it fantasyfish677/rvc:v0 /bin/bash
+ pip3 install flask_cors
+ python3 /app/server.py 2>&1 | tee /logs/train_server.log

- curl -X GET http://localhost:8080/check \
- -H 'Content-Type: application/json' \
- -d '{"expName":"drake-20"}'
+ export FLASK_APP=server
+ export FLASK_DEBUG=true
+ pip3 install gunicorn
+ gunicorn -b :8080 --timeout=600 server:app
+
+ curl -X GET http://3.16.130.199:8080/ping
+
+ curl -X POST http://3.16.130.199:8080/train \
+ -H 'Content-Type: application/json' \
+ -d '{"expName":"varun124","trainsetDir":"varun124"}'
+
+ curl -X GET http://3.16.130.199:8080/check \
+ -H 'Content-Type: application/json' \
+ -d '{"expName":"kanye-1"}'
  '''
  import json
  import os
@@ -20,16 +31,21 @@ from flask import Flask, request
  from logging import exception
  import time
  from server_utils import train_model
+ from flask_cors import CORS, cross_origin

  print("import successful!")

- app = Flask(__name__)
+ app = Flask("train server")
+ cors = CORS(app)
+ app.config['CORS_HEADERS'] = 'Content-Type'

- @app.route("/ping")
+ @app.route("/ping", methods=['GET', 'POST'])
+ @cross_origin()
  def healthcheck():
      return json.dumps({"code": 200, "message": "responding"}).encode('utf-8')

- @app.route("/train", methods=['GET'])
+ @app.route("/train", methods=['GET', 'POST'])
+ @cross_origin()
  def train():
      if request.headers['Content-Type'] != 'application/json':
          exception("Header error")
@@ -37,7 +53,7 @@ def train():
      try:
          content = request.get_json()
          exp_name = content['expName']
-         trainset_dir = os.path.join('/inputDir', content['trainsetDir'])
+         trainset_dir = os.path.join('/dotdemo-dev', content['trainsetDir'])
          log_path = os.path.join("/logs{}.log".format(exp_name))
          if os.path.exists('/third_party/RVC/logs/{}'.format(exp_name)):
              os.system('rm -rf /third_party/RVC/logs/{}'.format(exp_name))
@@ -54,7 +70,8 @@ def train():
          exception("Training process failed")
          return json.dumps({"message":"Training process failed due to {}".format(e)}), 500

- @app.route("/check", methods=['GET'])
+ @app.route("/check", methods=['GET', 'POST'])
+ @cross_origin()
  def check():
      if request.headers['Content-Type'] != 'application/json':
          exception("Header error")