Upload 10 files
- README.md +202 -0
- adapter_config.json +31 -0
- adapter_model.safetensors +3 -0
- optimizer.pt +3 -0
- rng_state.pth +3 -0
- scheduler.pt +3 -0
- test.csv +574 -0
- train.csv +0 -0
- trainer_state.json +63 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,202 @@
---
library_name: peft
base_model: meta-llama/Meta-Llama-3-8B
---

# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

## Model Details

### Model Description

<!-- Provide a longer summary of what this model is. -->

- **Developed by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Model type:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
- **Finetuned from model [optional]:** [More Information Needed]

### Model Sources [optional]

<!-- Provide the basic links for the model. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->

### Direct Use

<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->

[More Information Needed]

### Downstream Use [optional]

<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

[More Information Needed]

## Training Details

### Training Data

<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

[More Information Needed]

### Training Procedure

<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing [optional]

[More Information Needed]

#### Training Hyperparameters

- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->

#### Speeds, Sizes, Times [optional]

<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->

[More Information Needed]

## Evaluation

<!-- This section describes the evaluation protocols and provides the results. -->

### Testing Data, Factors & Metrics

#### Testing Data

<!-- This should link to a Dataset Card if possible. -->

[More Information Needed]

#### Factors

<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->

[More Information Needed]

#### Metrics

<!-- These are the evaluation metrics being used, ideally with a description of why. -->

[More Information Needed]

### Results

[More Information Needed]

#### Summary

## Model Examination [optional]

<!-- Relevant interpretability work for the model goes here -->

[More Information Needed]

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** [More Information Needed]
- **Hours used:** [More Information Needed]
- **Cloud Provider:** [More Information Needed]
- **Compute Region:** [More Information Needed]
- **Carbon Emitted:** [More Information Needed]

## Technical Specifications [optional]

### Model Architecture and Objective

[More Information Needed]

### Compute Infrastructure

[More Information Needed]

#### Hardware

[More Information Needed]

#### Software

[More Information Needed]

## Citation [optional]

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Model Card Authors [optional]

[More Information Needed]

## Model Card Contact

[More Information Needed]

### Framework versions

- PEFT 0.7.1
adapter_config.json
ADDED
@@ -0,0 +1,31 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "q_proj",
    "down_proj",
    "o_proj",
    "up_proj",
    "v_proj",
    "gate_proj",
    "k_proj"
  ],
  "task_type": "CAUSAL_LM"
}
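The config above describes a LoRA adapter (r=16, lora_alpha=32, dropout 0.05) over all attention and MLP projection layers of Meta-Llama-3-8B. A minimal loading sketch with PEFT 0.7.1, assuming the repo files have been fetched locally (`adapter_dir` is a placeholder path, not part of this upload):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "meta-llama/Meta-Llama-3-8B"  # from base_model_name_or_path above
adapter_dir = "./adapter"               # placeholder: local checkout of this repo

# Load the frozen base model, then attach the LoRA weights on top of it.
base = AutoModelForCausalLM.from_pretrained(base_id)
tokenizer = AutoTokenizer.from_pretrained(base_id)
model = PeftModel.from_pretrained(base, adapter_dir)
model.eval()  # the config sets inference_mode: true, so no further training here
```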
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d8d134a867b464adc856b5f01dae0941f27ca474c7bdcd6b470e8782aeec34bb
size 167832240
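The three lines above are a Git LFS pointer, not the weights themselves; the actual ~168 MB safetensors file is fetched on `git lfs pull`. A small sketch, assuming the real file has been downloaded, that checks it against the pointer's oid and size:

```python
import hashlib
import os

path = "adapter_model.safetensors"
expected_oid = "d8d134a867b464adc856b5f01dae0941f27ca474c7bdcd6b470e8782aeec34bb"
expected_size = 167832240

# Hash the file in 1 MiB chunks to keep memory use flat.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch: file may still be an LFS pointer"
assert h.hexdigest() == expected_oid, "sha256 mismatch: corrupted download"
```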
optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1f9798326b30a27d4062af95ef5bf8e6a24796485534d0476d7c00581201d073
size 84581014
rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:50b38bb265826e4d13d27ecc837b01eb2f65ab2f5d4618ee7ec8f8d5f71f00a6
size 14244
scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe0caad89d6eb6d8e004bec75a300cce2acf1f6087f850d287d64723309dd1d2
size 1064
test.csv
ADDED
@@ -0,0 +1,574 @@
Endpoint,Description,Inputs,Output,Test_Code
/config/rest/delete/,requesting to delete a config value with a valid config name but an invalid token,"config_value = { ""name"": ""primary_server"" }","{
  ""status"": 401,
  ""message"": ""Invalid token""
}","def test_config_delete_with_invalid_token(invalid_exec_api):
    """"""
    Deleting the non-deletable config values with an invalid token
    """"""
    config_value = {
        ""name"": ""primary_server"",
    }
    r = invalid_exec_api.config_delete(config_value)
    test_assert.status(r, 401)
    result = r.json()
    assert result['detail'] == ""Invalid token.""
"
/config/rest/version/,fetching the version information when an invalid token is provided,,"{
  ""status"": 401,
  ""message"": ""Invalid token""
}","def test_version_config_with_invalid_token(invalid_exec_api):
    """"""
    Fetching the information of Version and Build Number with an invalid token
    """"""
    r = invalid_exec_api.config_version()
    result = r.json()
    test_assert.status(r, 401)
    assert result['detail'] == ""Invalid token.""
"
/deploy/rest/delete/{UUID}/,the manager deletes the image when the manager has rights over the user and the server,,"{
  ""status"": 204
}","endpoint = ""deploy_delete""

PARAMETERS = [{""dest_obj"": OBJ_DEPL}]
PARAMETERS_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""deploy_with"": SRV_MANAGER_RIGHTS}]
PARAMETERS_NO_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""deploy_with"": SRV_NO_MANAGER_RIGHTS}]

@pytest.mark.parametrize(""custom_lib_non_admin_operations"", PARAMETERS_SRV_RIGHT, indirect=True)
@pytest.mark.parametrize(""custom_lib_admin_operations"", PARAMETERS_SRV_RIGHT, indirect=True)
def test_deploy_delete_manager_server_right(skip_if_not_manager, custom_lib_admin_operations, custom_lib_non_admin_operations, run_api):
    """"""
    Deleting the VM by Manager
    """"""
    # When the user is not part of the group that the manager manages and deployment is on manager rights to server
    deploy_id = custom_lib_admin_operations
    r = run_api.deploy_image_delete(deploy_id, {})
    test_assert.status(r, manager_rights_response(endpoint, manages_user=False, manages_server=True))

    # When the user is part of the group that the manager manages and deployment is on manager rights to server
    deploy_id = custom_lib_non_admin_operations
    r = run_api.deploy_image_delete(deploy_id, {})
    test_assert.status(r, manager_rights_response(endpoint, manages_user=True, manages_server=True))
"
/deploy/rest/shutdown/{UUID}/,shutting down a deployed machine by an admin using a valid UUID while the machine is in the running state,,"{
  ""status"": 201,
  ""response"": ""Machine shutdown""
}","PARAMETERS = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""]}]

@pytest.mark.parametrize(""custom_lib_non_admin_operations"", PARAMETERS, indirect=True)
def test_deploy_shutdown_admin(skip_if_not_admin, custom_lib_non_admin_operations, run_api):
    """"""
    Shutdown of the VM by Admin
    """"""
    deploy_id = custom_lib_non_admin_operations
    r = run_api.deploy_shutdown(deploy_id)
    test_assert.status(r, 201)
"
/deploy/rest/stop/{UUID}/,stopping a machine by an admin when a valid UUID is provided and the machine is in the running state,,"{
  ""status"": 201,
  ""response"": ""stopping deployment""
}","endpoint = ""deploy_stop""

PARAMETERS = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""]}]
PARAMETERS_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_MANAGER_RIGHTS}]
PARAMETERS_NO_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_NO_MANAGER_RIGHTS}]

@pytest.mark.parametrize(""custom_lib_non_admin_operations"", PARAMETERS, indirect=True)
def test_deploy_stop_admin(skip_if_not_admin, custom_lib_non_admin_operations, run_api):
    """"""
    Stopping the VM by Admin
    """"""
    # Admin check of stopping a deployment created by a different user
    deploy_id = custom_lib_non_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, 201)
"
/deploy/rest/stop/{UUID}/,stopping a machine by a non-admin user when a valid UUID is provided and the machine is in the running state,,"{
  ""status"": 403
}","endpoint = ""deploy_stop""

PARAMETERS = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""]}]
PARAMETERS_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_MANAGER_RIGHTS}]
PARAMETERS_NO_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_NO_MANAGER_RIGHTS}]

@pytest.mark.parametrize(""custom_lib_admin_operations"", PARAMETERS, indirect=True)
def test_deploy_stop_non_admin(skip_if_not_non_admin, custom_lib_admin_operations, run_api):
    """"""
    Stopping the VM by non-admin
    """"""
    deploy_id = custom_lib_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, 403)
"
/deploy/rest/stop/{UUID}/,"stopping a machine deployment by a manager when a valid UUID is provided and the machine is in the running state, and the manager has rights over the server",,,"endpoint = ""deploy_stop""

PARAMETERS = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""]}]
PARAMETERS_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_MANAGER_RIGHTS}]
PARAMETERS_NO_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_NO_MANAGER_RIGHTS}]

@pytest.mark.parametrize(""custom_lib_non_admin_operations"", PARAMETERS_SRV_RIGHT, indirect=True)
@pytest.mark.parametrize(""custom_lib_admin_operations"", PARAMETERS_SRV_RIGHT, indirect=True)
def test_deploy_stop_manager_server_right(skip_if_not_manager, custom_lib_admin_operations, custom_lib_non_admin_operations, run_api):
    """"""
    Stopping the VM by a manager who has rights on the server
    """"""
    # When the user is not part of the group that the manager manages
    deploy_id = custom_lib_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, manager_rights_response(endpoint, manages_user=False, manages_server=True))

    # When the user is part of the group that the manager manages and deployment is on manager rights to server
    deploy_id = custom_lib_non_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, manager_rights_response(endpoint, manages_user=True, manages_server=True))
    run_api.deploy_stop(deploy_id)
"
/deploy/rest/stop/{UUID}/,"stopping a machine deployment by a manager when a valid UUID is provided and the machine is in the running state, but the manager does not have rights over the server",,,"endpoint = ""deploy_stop""

PARAMETERS = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""]}]
PARAMETERS_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_MANAGER_RIGHTS}]
PARAMETERS_NO_SRV_RIGHT = [{""dest_obj"": OBJ_DEPL, ""final_state"": DEPL_STATE[""running""], ""deploy_with"": SRV_NO_MANAGER_RIGHTS}]

@pytest.mark.parametrize(""custom_lib_non_admin_operations"", PARAMETERS_NO_SRV_RIGHT, indirect=True)
@pytest.mark.parametrize(""custom_lib_admin_operations"", PARAMETERS_NO_SRV_RIGHT, indirect=True)
def test_deploy_stop_manager_no_server_right(skip_if_not_manager, custom_lib_admin_operations, custom_lib_non_admin_operations, run_api):
    """"""
    Stopping the VM by a manager who has no rights on the server
    """"""
    # When the user is not part of the group that the manager manages and the deployment is not on a manager rightful server
    deploy_id = custom_lib_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, manager_rights_response(endpoint, manages_user=False, manages_server=False))

    # When the user is part of the group that the manager manages but the deployment is not on a manager rightful server
    deploy_id = custom_lib_non_admin_operations
    r = run_api.deploy_stop(deploy_id)
    test_assert.status(r, manager_rights_response(endpoint, manages_user=True, manages_server=False))
"
/ilibrary/rest/add/,"creating an island library with invalid bridge, start, and end IPs","{
  ""name"": ""test_ilibrary_add_invalid_ips"",
  ""is_public"": True,
  ""network_segments"": {
    ""add"": [
      {
        ""name"": ""network_segment"",
        ""bridge_ip"": ""1921681000"",
        ""start_ip"": ""1921681001"",
        ""end_ip"": ""192168100150""
      }
    ]
  }
}","{
  ""status"": 400,
  ""message"": ""Enter valid IPv4 addresses""
}","def test_ilibrary_add_invalid_ips(run_api):
    """"""
    Creating an Island Library with invalid bridge ip, start ip, end ip
    """"""
    params = {
        ""name"": ""test_ilibrary_add_invalid_ips"",
        ""is_public"": True,
        ""network_segments"": {
            ""add"": [
                {
                    ""name"": ""network_segment"",
                    ""bridge_ip"": ""1921681000"",
                    ""start_ip"": ""1921681001"",
                    ""end_ip"": ""192168100150""
                }
            ]
        }
    }
    params, r = run_api.ilibrary_add_new_island(params=params)
    test_assert.status(r, 400)
    rjson = r.json()
    errors = rjson['network_segments']['add'][0]
    assert errors['start_ip'] == ['Enter a valid IPv4 address.']
    assert errors['end_ip'] == ['Enter a valid IPv4 address.']
    assert errors['bridge_ip'] == ['Enter a valid IPv4 address.']
"
/ilibrary/rest/details/{UUID}/,fetching details of a public island library converted from a private island,"machine1 = {
  ""uuid"": r1.json()[""uuid""],
  ""nics"": {
    ""add"": [
      {
        ""mac"": ""auto"",
        ""type"": ""bridge"",
        ""model"": networks[0].get(""model"", ""virtio""),
        ""segment"": ""Default Public Segment""
      },
    ],
  }
}
params = {
  ""name"": ""Machine1"",
  ""is_public"": False,
  ""machines"": {
    ""add"": [machine1],
  },
}","{
  ""response"": ""success""
}","def test_ilibrary_details_with_edit_private_island_to_public_island(skip_if_not_admin, run_api):
    """"""
    To check machine type with public island
    """"""
    params1, r1 = run_api.library_add_new_vm(networks=networks)
    machine1 = {
        ""uuid"": r1.json()[""uuid""],
        ""nics"": {
            ""add"": [
                {
                    ""mac"": ""auto"",
                    ""type"": ""bridge"",
                    ""model"": networks[0].get(""model"", ""virtio""),
                    ""segment"": ""Default Public Segment""
                },
            ],
        }
    }
    params = {
        ""name"": ""Machine1"",
        ""is_public"": False,
        ""machines"": {
            ""add"": [machine1],
        },
    }
    params, r = run_api.ilibrary_add_new_island(params=params)
    island_id = r.json()[""uuid""]
    params, r = run_api.ilibrary_edit_island(uuid=island_id, params={""is_public"": True})
    res = r.json()[""machines""]
    run_api.ilibrary_delete(uuid=island_id)
    run_api.library_delete(r1.json()[""uuid""])
    for machine in res:
        if not machine[""is_public""]:
            assert False, ""The json is %s"" % r.json()
"
/library/rest/add,adding a VM to the library when multiple boot disks with the same boot order are passed,"disks = [{ ""size"": 20, ""port"": ""sdb"", ""type"": ""sata"", ""format"": ""qcow2"", ""is_boot"": True, ""boot_order"": 1 }, { ""size"": 20, ""port"": ""sda"", ""type"": ""sata"", ""format"": ""qcow2"", ""is_boot"": True, ""boot_order"": 1 }]","{
  ""status"": 400,
  ""response"": ""Bad request""
}","def test_add_vm_to_library_multiple_bootable_disk_with_same_boot_order(run_api):
    """"""
    If multiple bootable disks with the same boot order are passed
    """"""
    disks = [{
        ""size"": 20,
        ""port"": ""sdb"",
        ""type"": ""sata"",
        ""format"": ""qcow2"",
        ""is_boot"": True,
        ""boot_order"": 1
    },
    {
        ""size"": 20,
        ""port"": ""sda"",
        ""type"": ""sata"",
        ""format"": ""qcow2"",
        ""is_boot"": True,
        ""boot_order"": 1
    }]

    params, response = run_api.library_add_new_vm(disks=disks, noraise=True)
    test_assert.status(response, 400)
"
/library/rest/adddisk/{UUID}/,adding a disk to a library when the provided lib_UUID does not exist,"lib_UUID = ""doesnotexist""","{
  ""status"": 404
}","def test_lib_add_disk_with_invalid_UUID(run_api):
    lib_UUID = ""doesnotexist""
    r = run_api.library_add_disk(lib_UUID)
    test_assert.status(r, 404)
"
/library/rest/ctypes/,getting the console types when requested,,"{
  ""status"": 200,
  ""response"": ""console type details displayed""
}","def test_library_ctypes(run_api):
    """"""
    Getting the list of console types
    """"""
    r = run_api.library_console_types()
    result = r.json()
    test_assert.status(result, LIBRARY_CONSOLE_TYPE, ""library_ctypes"")
    test_assert.status(r, 200)
"
/library/rest/delete/{UUID}/,deleting a library by a non-admin when provided with a valid UUID,,"{
  ""status"": 403
}","PARAMETERS = [{""dest_obj"": OBJ_LIB}]

@pytest.mark.parametrize(""custom_lib_admin_operations"", PARAMETERS, indirect=True)
def test_lib_delete_non_admin(skip_if_not_non_admin, custom_lib_admin_operations, run_api):
    """"""
    Deleting the Library by non-Admin
    """"""
    # Non-admin check for deleting the Library created by a different user.
    lib_id = custom_lib_admin_operations
    r = run_api.library_delete(lib_id, {})
    test_assert.status(r, 403)
"
/library/rest/edit/{UUID}/,deletion of a disk when an invalid UUID is provided,"{""delete"": [
  {
    ""UUID"": disk_UUID,
    ""port"": ""sdz"",
    ""type"": r['hw']['disks'][0]['type']
  }
]}","{
  ""status"": 404,
  ""message"": ""Disk with UUID does not exist""
}","def test_library_edit_delete_invalid_disk_UUID(library_add_new_vm, run_api):
    """"""
    Delete a disk with an invalid UUID
    """"""
    p, r = library_add_new_vm
    lib_id = r['UUID']
    disk_UUID = 'invalid'
    disks = {""delete"": [
        {
            ""UUID"": disk_UUID,
            ""port"": ""sdz"",
            ""type"": r['hw']['disks'][0]['type']
        }
    ]}
    params = {""hw"": {""disks"": disks}}
    res = run_api.library_edit(lib_id, params)
    test_assert.status(res, 404)
    rjson = res.json()
    assert rjson['error'] == f""Disk with UUID {disk_UUID} does not exist"", ""|> json %s"" % rjson
"
/library/rest/edit/{UUID}/,updating a library with the is_public flag set to True,"params = { ""is_public"": True, ""hw"": {} }","{
  ""status"": 400,
  ""message"": ""Failed to create task to sync public layers on primary""
}","def test_library_edit_update_is_public_flag(skip_if_not_non_admin, library_add_new_vm, run_api):
    """"""
    Update the is_public flag
    """"""
    p, res = library_add_new_vm
    UUID = res['UUID']
    params = {
        ""is_public"": True,
        ""hw"": {}
    }
    r = run_api.library_edit(UUID, params)
    test_assert.status(r, 400)
    rjson = r.json()
    assert rjson['error'] == ""Failed to create task to sync public layers on primary"", ""Json |> %s"" % rjson
"
/library/rest/edit/{UUID}/,updating the network in a library with valid data,"networks = [{
  ""type"": ""bridge"",
  ""model"": ""virtio"",
  ""segment"": ""Default Public Segment"",
}]

update_network = [{
  ""type"": ""host"",
  ""model"": ""virtio"",
  ""segment"": ""HostOnly Segment"",
}]","{
  ""status"": 201
}","def test_library_edit_network_with_valid_data(run_api):
    """"""
    Edit network with valid data
    """"""
    networks = [{
        ""type"": ""bridge"",
        ""model"": ""virtio"",
        ""segment"": ""Default Public Segment"",
    }]
    params, r = run_api.library_add_new_vm(networks=networks)
    update_network = [{
        ""type"": ""host"",
        ""model"": ""virtio"",
        ""segment"": ""HostOnly Segment"",
    }]
    params = {'hw': {'networks': update_network}}
    lib_id = r.json()[""UUID""]
    res = run_api.library_edit(lib_id, params)
    test_assert.status(res, 201)
    rjson = res.json()
    new_network = rjson['hw']['networks'][0]
    assert new_network['type'] == 'host', ""|> Error %s"" % rjson
    assert new_network['segment'] == 'HostOnly Segment', ""|> Error %s"" % rjson
    run_api.library_delete(lib_id, {})
"
/library/rest/hvmtypes/,fetching the hypervisor types when requested,,"{
  ""status"": 200,
  ""response"": ""list of hypervisor types""
}","def test_library_hvmtypes(run_api):
    """"""
    Getting the list of Hypervisor types
    """"""
    r = run_api.library_hvmtypes()
    result = r.json()
    test_assert.status(result, LIBRARY_HVM_TYPE, ""library_hvmtypes"")
    test_assert.status(r, 200)
"
/library/rest/revisions/,requesting the revision list of a library when machine_UUID is empty,"{
  ""machine_UUID"": """"
}","{
  ""status"": 400,
  ""message"": ""Machine UUID should be provided""
}","def test_library_revisions_without_UUID(run_api):
    """"""
    Without UUID
    """"""
    res = run_api.library_revisions("""")
    test_assert.status(res, 400)
    rjson = res.json()
    assert rjson['detail'] == ""Machine UUID should be provided"", ""|> The error %s"" % rjson
"
/rtask/rest/rlist/,fetching the list of remote tasks without authorization,,"{
  ""status"": 401,
  ""message"": ""Authentication credentials were not provided""
}","def test_rtask_rlist_without_authorization(anonymous_exec_api):
    """"""
    Fetching the List of Jobs without authorization
    """"""
    r = anonymous_exec_api.rtask_rlist()
    res = r.json()
    test_assert.status(r, 401)
    assert res['detail'] == ""Authentication credentials were not provided.""
"
ideploy/rest/change_ownership,Successful change of ownership from one user to another where both users exist and the requester has the necessary permissions.,"{
  ""deployment_UUIDs"": [UUID],
  ""owner"": non_admin_exec_api.user,
  ""dest_user"": ""manager""
}","{""status"": 200, ""message"": ""Operation performed successfully without any error""}","def test_deploy_change_ownership(skip_if_non_admin, non_admin_exec_api, run_api):
    """"""
    To change ownership of a deployed machine from a non-admin user to a manager, by admin
    """"""
    params, r = non_admin_exec_api.library_add_new_vm()
    lib_id = r.json()[""UUID""]
    r = non_admin_exec_api.deploy_image(lib_id=lib_id, deploy_on=list(run_api.clm_my_servers.keys()))
    UUID = r.json()['UUID']
    params = {
        ""deployment_UUIDs"": [UUID],
        ""owner"": non_admin_exec_api.user,
        ""dest_user"": ""manager""
    }
    res = run_api.deploy_change_ownership(params=params)
    test_assert.status(res, 200)
    new_owner = run_api.deploy_details(deploy_id=UUID).json()['owner']
    assert new_owner == ""manager""
    run_api.deploy_image_delete(deploy_id=UUID)
    run_api.library_delete(UUID=lib_id)
"
/license/rest/licenses_check,checking the license when the days param is negative,"{
  ""days"": -1
}","{
  ""status"": 400,
  ""message"": ""Value of `days` cannot be negative""
}","def test_license_check_when_day_is_negative(run_api):
    """"""
    License check when days is negative
    """"""
    r = run_api.license_check(days=-1)
    rjson = r.json()
    test_assert.status(r, 400)
    assert rjson['error'] == ""Value of `days` cannot be negative"", ""The error %s"" % rjson
"
/deploy/rest/deploylist,"getting the list of deployed machine images by setting scope to ""my"". Check the user type before performing the operation; only the admin user type has permission to perform such operations.","{
  ""scope"": ""my""
}","{
  ""response"": ""success""
}","def test_deploy_list_filter_with_scope_my(run_api, admin_exec_api, library_add_new_vm):
    """"""
    Filter deploy list using scope = my
    """"""
    params, r = library_add_new_vm
    lib_id = r[""uuid""]
    r = admin_exec_api.deploy_image(lib_id)
    deploy_id = r.json()[""uuid""]
    count = check_count_deploylist(run_api, deploy_id, params={'scope': 'my', 'uuid': deploy_id})

    if run_api.user_type == USER_TYPE['non_admin'] or run_api.user_type == USER_TYPE['manager']:
        assert count == 0
    elif run_api.user_type == USER_TYPE['admin']:
        assert count == 1

    r = admin_exec_api.deploy_image_delete(deploy_id)
"
/config/rest/set/,"setting a None value for the secret config. Check the user type before performing the operation; only the admin user type has permission to perform such operations.","{
  ""name"": ""secret"",
  ""value"": None
}","{
  ""status"": 400,
  ""response"": ""Invalid secret_key Value""
}","def test_config_None_set_secret(run_api):
    """"""
    Set the secret-key config value as None
    """"""
    config_value = {
        ""name"": ""secret"",
        ""value"": None
    }
    r = run_api.config_set(config_value)
    res = r.json()
    if run_api.user_type in [USER_TYPE[""manager""], USER_TYPE[""non_admin""]]:
        test_assert.status(r, 403)
    elif run_api.user_type == USER_TYPE['admin']:
        test_assert.status(r, 400)
        assert res[""result""] == 'FAILURE'
        assert 'Invalid secret_key Value' in res[""error""], res
"
/group/rest/add/,"adding a new group when the group name field is missing. Check the user type before performing the operation; only the admin user type has permission to perform such operations.",,"{
  ""status"": 400,
  ""message"": ""Group Name is required and it can not be blank""
}","def test_add_group_with_group_name_field_missing(run_api):
    """"""
    Adding a new Group with the group name field missing
    """"""
    params, r = run_api.group_add(template={})
    if run_api.user_type in [USER_TYPE[""non_admin""], USER_TYPE[""manager""]]:
        test_assert.status(r, 403)
    elif run_api.user_type == USER_TYPE[""admin""]:
        result = r.json()
        test_assert.status(r, 400)
        assert result['error'] == ""Group Name is required and it can not be blank""
"
/group/rest/update/,"updating the deployment strategy of a group using an invalid value. Check the user type before performing the operation; only the admin user type has permission to perform such operations.","{
  ""name"": <group name>,
  ""deployment_strategy"": 'invalid'
}","{
  ""status"": 400,
  ""message"": ""Invalid deployment_strategy""
}","def test_group_update_invalid_deployment_strategy(skip_if_not_admin, group_add, run_api):
    """"""
    Group update with an invalid deployment_strategy
    """"""
    params, r = group_add
    rjson = r.json()
    group_id = rjson['id']
    group_param = {
        ""name"": rjson['name'],
        ""deployment_strategy"": 'invalid'
    }
    updated_param, r = run_api.group_update(group_id, group_param)
    result = r.json()
    test_assert.status(r, 400)
    assert result['error'] == ""Invalid deployment_strategy"", ""|> Json %s"" % result
"
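The Test_Code column relies on shared fixtures and helpers (`run_api`, `invalid_exec_api`, `anonymous_exec_api`, `test_assert`, `manager_rights_response`, ...) that are not included in this upload. As a rough, hypothetical sketch of how the smallest of those pieces could be wired up (names and behavior inferred from how the tests use them, not taken from the actual suite):

```python
import pytest
import requests

class _TestAssert:
    @staticmethod
    def status(response, expected):
        # Compare an HTTP response's status code with the expected value.
        assert response.status_code == expected, (
            f"expected {expected}, got {response.status_code}: {response.text}"
        )

test_assert = _TestAssert()

class InvalidExecApi:
    """Hypothetical API client that always sends an invalid token."""

    def __init__(self, base_url):
        self.base_url = base_url
        self.headers = {"Authorization": "Bearer not-a-real-token"}

    def config_version(self):
        return requests.get(f"{self.base_url}/config/rest/version/", headers=self.headers)

    def config_delete(self, config_value):
        return requests.post(f"{self.base_url}/config/rest/delete/",
                             json=config_value, headers=self.headers)

@pytest.fixture
def invalid_exec_api():
    return InvalidExecApi(base_url="http://localhost:8000")  # placeholder URL
```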
train.csv
ADDED
The diff for this file is too large to render.
See raw diff
trainer_state.json
ADDED
@@ -0,0 +1,63 @@
{
  "best_metric": 0.08488664031028748,
  "best_model_checkpoint": "/kaggle/working/trainer/checkpoint-354",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 354,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 4.388888888888889e-05,
      "loss": 0.7966,
      "step": 118
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.14867298305034637,
      "eval_runtime": 37.5027,
      "eval_samples_per_second": 0.693,
      "eval_steps_per_second": 0.347,
      "step": 118
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.2962962962962964e-05,
      "loss": 0.105,
      "step": 236
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.10169088840484619,
      "eval_runtime": 37.4736,
      "eval_samples_per_second": 0.694,
      "eval_steps_per_second": 0.347,
      "step": 236
    },
    {
      "epoch": 3.0,
      "learning_rate": 2.2037037037037038e-05,
      "loss": 0.073,
      "step": 354
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.08488664031028748,
      "eval_runtime": 37.5454,
      "eval_samples_per_second": 0.692,
      "eval_steps_per_second": 0.346,
      "step": 354
    }
  ],
  "logging_steps": 500,
  "max_steps": 590,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1.3117494532453171e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
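trainer_state.json records three completed epochs (of num_train_epochs: 5), with eval loss falling from 0.149 to 0.085 and checkpoint-354 named as the best checkpoint. A minimal sketch, using only the stdlib and the keys shown above, for extracting the loss curve:

```python
import json

with open("trainer_state.json") as f:
    state = json.load(f)

print("best checkpoint:", state["best_model_checkpoint"])  # .../checkpoint-354
print("best eval loss:", state["best_metric"])              # 0.0848...

# log_history mixes training entries (with "loss") and eval entries (with "eval_loss").
train_curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_curve = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]
print("train:", train_curve)  # [(118, 0.7966), (236, 0.105), (354, 0.073)]
print("eval:", eval_curve)
```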
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:05cd2b18a00bd366c2eb1651ba82f068e5dd0988fec2f9720ca54c19c676c666
size 4728
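training_args.bin is a pickled transformers TrainingArguments object rather than a plain tensor file, so inspecting it requires a compatible transformers install. A hedged sketch (on torch >= 2.6, `weights_only=False` must be passed explicitly because the file contains arbitrary pickled objects; only do this for files you trust):

```python
import torch

# Unpickles the TrainingArguments saved by the Trainer; transformers must be importable.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # expected: TrainingArguments
print("lr:", args.learning_rate)
print("epochs:", args.num_train_epochs)
print("per-device batch size:", args.per_device_train_batch_size)
```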