Related
I was trying to run a hyperparameter tuning job on Vertex AI using the Python SDK described here. Around 2 hours ago it successfully sent the job to run. I noticed there were some errors in my code so the run failed, and I went back, fixed them, and re-ran the code, and what I got was as below.
Traceback (most recent call last):
File "/workspace/.pip-modules/lib/python3.8/site-packages/google/api_core/grpc_helpers.py", line 67, in error_remapped_callable
return callable_(*args, **kwargs)
File "/workspace/.pip-modules/lib/python3.8/site-packages/grpc/_channel.py", line 946, in __call__
return _end_unary_response_blocking(state, call, False, None)
File "/workspace/.pip-modules/lib/python3.8/site-packages/grpc/_channel.py", line 849, in _end_unary_response_blocking
raise _InactiveRpcError(state)
grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
status = StatusCode.INTERNAL
details = "Internal error encountered."
debug_error_string = "{"created":"#1623393121.374988331","description":"Error received from peer ipv4:142.251.33.74:443","file":"src/core/lib/surface/call.cc","file_line":1066,"grpc_message":"Internal error encountered.","grpc_status":13}"
>
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/workspace/ariel_ml_2021/hparam_tuning.py", line 140, in <module>
create_hyperparameter_tuning_job_python_package()
File "/workspace/ariel_ml_2021/hparam_tuning.py", line 133, in create_hyperparameter_tuning_job_python_package
response = client.create_hyperparameter_tuning_job(
File "/workspace/.pip-modules/lib/python3.8/site-packages/google/cloud/aiplatform_v1/services/job_service/client.py", line 1363, in create_hyperparameter_tuning_job
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
File "/workspace/.pip-modules/lib/python3.8/site-packages/google/api_core/gapic_v1/method.py", line 145, in __call__
return wrapped_func(*args, **kwargs)
File "/workspace/.pip-modules/lib/python3.8/site-packages/google/api_core/grpc_helpers.py", line 69, in error_remapped_callable
six.raise_from(exceptions.from_grpc_error(exc), exc)
File "<string>", line 3, in raise_from
google.api_core.exceptions.InternalServerError: 500 Internal error encountered.
I thought this might be due to my changes in the Python code causing the error, so I tried with the original copy (without making any changes), but the error persists. The code for hyperparameter tuning is as below if required.
from google.cloud import aiplatform
def create_hyperparameter_tuning_job_python_package(
    project: str = "<my_project_id>",
    display_name: str = "<some_description>",
    executor_image_uri: str = "us-docker.pkg.dev/vertex-ai/training/tf-cpu.2-4:latest",
    package_uri: str = "gs://<bucket_name>/",
    python_module: str = "train_second",  # located at gs://<bucket_name>/train_second.py
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
) -> None:
    """Create and submit a Vertex AI hyperparameter tuning job.

    The study tunes a ``batch_size`` parent parameter plus seven
    conditional child parameters (H1/H2/H3 layer sizes, D1 dropout,
    input mean/std, and learning rate), all of which are active for
    every parent batch-size value.  Each trial runs a Python-package
    trainer on a single ``e2-standard-4`` replica.

    Args:
        project: GCP project id that owns the job.
        display_name: Human-readable job name shown in the console.
        executor_image_uri: Pre-built training container image.
        package_uri: GCS URI of the packaged training code.
            NOTE(review): per Vertex AI docs this should point at a
            packaged source distribution (e.g. a ``.tar.gz`` file),
            not at a bucket/directory — confirm before running.
        python_module: Module inside the package to execute.
        location: Region the job runs in.
        api_endpoint: Regional Vertex AI API endpoint.

    Returns:
        None.  The created job resource is printed to stdout.
    """
    client = aiplatform.gapic.JobServiceClient(
        client_options={"api_endpoint": api_endpoint}
    )

    linear = aiplatform.gapic.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE
    # Every conditional child parameter applies to all parent batch sizes.
    batch_sizes = [10, 25, 50, 100]

    def _discrete_child(parameter_id, values):
        """Conditional child spec over an explicit list of values."""
        return {
            "parameter_spec": {
                "parameter_id": parameter_id,
                "discrete_value_spec": {"values": values},
                "scale_type": linear,
            },
            "parent_discrete_values": {"values": batch_sizes},
        }

    def _double_child(parameter_id, min_value, max_value):
        """Conditional child spec over a continuous [min, max] range."""
        return {
            "parameter_spec": {
                "parameter_id": parameter_id,
                "double_value_spec": {"min_value": min_value, "max_value": max_value},
                "scale_type": linear,
            },
            "parent_discrete_values": {"values": batch_sizes},
        }

    metric = {
        "metric_id": "ariel_score",
        "goal": aiplatform.gapic.StudySpec.MetricSpec.GoalType.MAXIMIZE,
    }

    parameter = {
        "parameter_id": "batch_size",
        "discrete_value_spec": {"values": batch_sizes},
        "scale_type": linear,
        "conditional_parameter_specs": [
            _discrete_child("H1", [4, 8, 16, 32, 64, 128, 256, 512, 1024]),
            _discrete_child("H2", [64, 128, 256, 512, 1024]),
            _discrete_child("H3", [4, 8, 16, 32, 64, 128, 256, 512, 1024]),
            _double_child("D1", 0.01, 0.5),
            _discrete_child("mean", [0.0, 1.0]),
            _double_child("std", 0.005, 0.5),
            _discrete_child("lr", [0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3]),
        ],
    }

    # Trial job spec: one CPU replica running the packaged trainer.
    worker_pool_spec = {
        "machine_spec": {"machine_type": "e2-standard-4"},
        "replica_count": 1,
        "python_package_spec": {
            "executor_image_uri": executor_image_uri,
            "package_uris": [package_uri],
            "python_module": python_module,
            "args": [],
        },
    }

    hyperparameter_tuning_job = {
        "display_name": display_name,
        "max_trial_count": 2,
        "parallel_trial_count": 2,
        "study_spec": {
            "metrics": [metric],
            "parameters": [parameter],
        },
        "trial_job_spec": {"worker_pool_specs": [worker_pool_spec]},
    }

    response = client.create_hyperparameter_tuning_job(
        parent=f"projects/{project}/locations/{location}",
        hyperparameter_tuning_job=hyperparameter_tuning_job,
    )
    # The original used print(f"response:", response); the f-prefix had no
    # placeholder and was a no-op, so use a plain literal.
    print("response:", response)


if __name__ == "__main__":
    create_hyperparameter_tuning_job_python_package()
Thanks in advance.
It seems that the endpoint at us-central1 is experiencing the problem. The workaround is to use another endpoint such as with us-east1 and the problem is solved.
Your package_uri is incorrect. It should point to a file containing the Python package (i.e. a tar.gz source distribution with all the code), not a directory or a bucket.
I feel like there should be an easy way to do this but I cannot find a solution online. I'm looking for a one-liner, something Pythonic.
I know you can do this:
Img[:,:,2]=200
but how can you add a condition to this?
if Img[:,:,2]<=100: 200
or
image =[i==100 for i in Img[:,:,2] if i <= 100]
Thanks
To simulate if a[:,:,2]<=100: 200, we can use np.where -
a[:,:,2] = np.where(a[:,:,2] <= 100, 200, a[:,:,2])
Or simply use the mask to assign -
a[a[:,:,2] <= 100,2] = 200
Sample run -
In [379]: a
Out[379]:
array([[[ 78, 134, 7],
[154, 37, 146],
[ 39, 95, 13]],
[[114, 138, 100],
[175, 198, 148],
[ 39, 130, 37]]])
In [380]: a[a[:,:,2] <= 100,2] = 200
In [381]: a
Out[381]:
array([[[ 78, 134, 200],
[154, 37, 146],
[ 39, 95, 200]],
[[114, 138, 200],
[175, 198, 148],
[ 39, 130, 200]]])
I have a problem.
I don't understand why Mockito throws an exception.
RecordWriter writes a Record into the file. I want to verify whether it writes or not.
I implemented another function and i tried to mock(OutputStream.class), but the result is the same. My current test is:
try (RecordWriter writer = new RecordWriter(serverAlias))
{
RecordWriter wr = mock(RecordWriter.class);
writer.writeRecordToFile(httpRecord.printRecord().getBytes(CHARSET_UTF8));
verify(wr,times(1)).writeRecordToFile(httpRecord.printRecord().getBytes(CHARSET_UTF8));
}
The exception is:
Wanted but not invoked:
recordWriter.writeRecordToFile(
[50, 48, 49, 54, 48, 51, 49, 56, 45, 49, 50, 49, 52, 49, 51, 44, 50, 48, 49, 54, 48, 51, 49, 56, 49, 49, 50, 50, 44, 50, 48, 49, 54, 48, 51, 49, 56, 49, 49, 50, 57, 44, 51, 51, 51, 54, 53, 52, 56, 55, 57, 56, 44, 112, 114, 111, 118, 97, 44, 104, 101, 108, 108, 111, 44, 105, 32, 100, 111, 110, 39, 116, 32, 107, 110, 111, 119, 10]
);
-> at report.RecordWriterTest.setup(RecordWriterTest.java:72)
Actually, there were zero interactions with this mock.
at report.RecordWriterTest.setup(RecordWriterTest.java:72)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:86)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:459)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:675)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:382)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:192)
Thanks to all.
You are not calling the writeRecordToFile method on the mock.
You have:
1) a real instance - RecordWriter writer = new RecordWriter(serverAlias)
2) a mock - RecordWriter wr = mock(RecordWriter.class);
and you call the method on the real instance (writer) and try to verify if the mock (wr) was interacted with.
Change the method call to: wr.writeRecordToFile(httpRecord.printRecord().getBytes(CHARSET_UTF8));
and it should work. Although it seems quite a pointless test, if you ask me.
I have 2 data columns as below; on plotting them I see a weird y-axis, in spite of explicitly enforcing set_ylim — what am I doing wrong?
xs =
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199]
ys=
[10828623, 5274952, 1085078, 901346, 605910, 569867, 531991, 514140, 466894, 460954, 439024, 379639, 378027, 354425, 302439, 300041, 285898, 268924, 255451, 251349, 244723, 239494, 236684, 231563, 227331, 225260, 215933, 185900, 183423, 183165, 181345, 171513, 168599, 161532, 155134, 152262, 149294, 145263, 144888, 142472, 139841, 137502, 136515, 127380, 126405, 123018, 121811, 116477, 116447, 111383, 106975, 104696, 93530, 92810, 91116, 89956, 88917, 88487, 87681, 86163, 80010, 75076, 74987, 70867, 70330, 69812, 69017, 68317, 67876, 64657, 63097, 62950, 61399, 59045, 58973, 56203, 55505, 54397, 53311, 52447, 50366, 49832, 48918, 47895, 47157, 46534, 45062, 44127, 43042, 42016, 41045, 39971, 39723, 39090, 36163, 35637, 34947, 33352, 31036, 30218, 29766, 29238, 28952, 25899, 25799, 25770, 25159, 24403, 23485, 22593, 22219, 21363, 21207, 20822, 20682, 19827, 19725, 19293, 18411, 17757, 17169, 17046, 16569, 16408, 16248, 16073, 16030, 15775, 14065, 13921, 13529, 13136, 12633, 12213, 12092, 11659, 10983, 10871, 10712, 10679, 10110, 9981, 9687, 9494, 8597, 8241, 8042, 7948, 7896, 7602, 7543, 6734, 6529, 6372, 6095, 6038, 5708, 5679, 5234, 5175, 4941, 4936, 4806, 4222, 4153, 4102, 4079, 3767, 3673, 3538, 3483, 3408, 3350, 3305, 3253, 3121, 3105, 3061, 2894, 2642, 2604, 2600, 2585, 2306, 2294, 2199, 2140, 2035, 1916, 1639, 1619, 1598, 1469, 1393, 1276, 1219, 1160, 1132, 1068, 1010]
# A stripped down code.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
df1=pd.read_csv(path,sep=',',header='infer')
xs=df1["col1"] # see values above
ys=df1["col1"] # ys was originally float, and I converted it to int.
ax1.set_ylim(min(ys),max(ys))
#(1010, 10828623)
plt.plot(xs,ys)
ax1.set_ylim(min(ys),max(ys))
#(1010, 10828623)
This is because there is a point that is 10 million + - is there a way to keep this point and yet represent this in the plot ?
Also, given that the range is so spread out, I would expect it not to show 0.4 etc., but actual numbers, even if in scientific notation.
plt.show()
I have a list:
def clc = [[1, 15, 30, 42, 48, 100], [58, 99], [16, 61, 85, 96, 98], [2, 63, 84, 90, 91, 97], [16, 61, 85, 96], [23, 54, 65, 95], [16, 29, 83, 94], [0, 31, 42, 93], [33, 40, 51, 56, 61, 62, 64, 89, 92], [0, 63, 84, 90, 91]]
and a sublist
def subclc = [[1, 15, 30, 42, 48, 100], [58, 99], [16, 61, 85, 96, 98], [2, 63, 84, 90, 91, 97]]
I need to remove sublist from original list
I do so:
subclc.each{
clc.remove(it)
}
but it throws an exception: Exception in thread "main" java.util.ConcurrentModificationException
I do not understand where the problem is or how to solve it.
Short answers:
For more groovyness and immutability, preserving the original list:
def removed = clc - subclc
assert removed == [[16, 61, 85, 96], [23, 54, 65, 95], [16, 29, 83, 94], [0, 31, 42, 93], [33, 40, 51, 56, 61, 62, 64, 89, 92], [0, 63, 84, 90, 91]]
And the java way, changing the original list:
clc.removeAll subclc
assert clc == [[16, 61, 85, 96], [23, 54, 65, 95], [16, 29, 83, 94], [0, 31, 42, 93], [33, 40, 51, 56, 61, 62, 64, 89, 92], [0, 63, 84, 90, 91]]
Long answer:
You are iterating through a list while changing that same list. In this case you are better off using Iterator.remove(), which is abstracted away by the foreach loop.
When you change the list using the foreach loop, you bump into the iterator checks for modification using checkForComodification(). Getting the iterator explicitly works:
// Remove every element of list2 from list1 while traversing it, using an
// explicit Iterator so the deletion is safe during iteration.
list1 = [10,20,30,40,50,60,70,80,90]
list2 = [50,60,80]
def iter = list1.iterator()
while (iter.hasNext()) {
def item = iter.next()
if (list2.contains(item)) iter.remove() // Iterator.remove() avoids ConcurrentModificationException
}
assert list1 == [10,20,30,40,70,90]
Or you can use indexes. Note you need to control the index:
// Same removal done with indexes; the index must be decremented after each
// remove so the element shifted into the vacated slot is not skipped.
list1 = [10,20,30,40,50,60,70,80,90]
list2 = [50,60,80]
for (int i = 0; i < list1.size(); i++) {
def item = list1[i]
if (list2.contains(item)) { list1.remove i-- } // post-decrement compensates for the left shift
}
assert list1 == [10,20,30,40,70,90]