-
Notifications
You must be signed in to change notification settings - Fork 106
[add] tests refactoring per backend #296
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from 1 commit
Commits
Show all changes
21 commits
Select commit
Hold shift + click to select a range
29548e7
[add] tests refactoring per backend
filipecosta90 53e3fb0
[add] extended testing
filipecosta90 b87f518
Merge branch 'master' into test.refactor
filipecosta90 3e24a77
Merge remote-tracking branch 'origin/master' into test.refactor
filipecosta90 9c70df5
[add] added specific modelget and scriptget tests on slaves. added te…
filipecosta90 084bc40
[fix] disabled SCRIPTGET new test since it's hanging CI ( further inv…
filipecosta90 95a861b
[fix] disabled SCRIPTGET new test since it's hanging CI ( further inv…
filipecosta90 3664d70
[fix] fixing encoding issue on string comparison
filipecosta90 c8becdb
[add] refactored tests_common to test for tensorset and tensorget acr…
filipecosta90 1a5a567
[add] added modelrun scriptrun disconnect test cases (test for client…
filipecosta90 b1f0c03
[add] increased the GPU tests timeout since we added more tests
filipecosta90 bd5dd66
[add] added valgrind options to RLTest. pruned testing.
filipecosta90 358dde6
[add] added valgrind options to RLTest. pruned testing.
filipecosta90 fc20252
[fix] fixed leak on RedisAI_ReplicateTensorSet
filipecosta90 e492990
[fix] fixed leak on ret->devicestr in RAI_ModelCreateTorch
filipecosta90 ad006c6
[fix] skipping modelrun and scriptrun disconnect on gpu test
filipecosta90 d7a95f8
[add] tests pruning for ci
filipecosta90 74da560
[fix] fixing gpu tests for CI
filipecosta90 f5a4249
[add] hardened ensureSlaveSynced
filipecosta90 a74f397
[fix] fixed Makefile in accordance to PR review, [add] splitted tf mo…
filipecosta90 e163b7b
Delete MakefileCopy
filipecosta90 File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,88 @@ | ||
| import json | ||
| import os | ||
| import random | ||
| import sys | ||
| import time | ||
| from multiprocessing import Process | ||
|
|
||
| import numpy as np | ||
| from skimage.io import imread | ||
| from skimage.transform import resize | ||
|
|
||
# Make the vendored "readies" helper package importable. The helper is
# optional (only present in the full build tree), so a missing module must
# not break test collection.
try:
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../deps/readies"))
    import paella  # noqa: F401 -- imported for its side effects only
except ImportError:
    # Narrowed from a bare `except:` so real errors (e.g. a broken
    # sys.path manipulation) are no longer silently swallowed.
    pass
|
|
||
def _enabled(test_var, build_var):
    """A backend is exercised unless its test toggle or its build toggle
    is explicitly set to the string "0" in the environment."""
    return os.environ.get(test_var) != "0" and os.environ.get(build_var) != "0"


# Per-backend test switches.
TEST_TF = _enabled("TEST_TF", "WITH_TF")
TEST_TFLITE = _enabled("TEST_TFLITE", "WITH_TFLITE")
TEST_PT = _enabled("TEST_PT", "WITH_PT")
TEST_ONNX = _enabled("TEST_ONNX", "WITH_ORT")

# Target device for the whole run; defaults to CPU.
DEVICE = os.environ.get('DEVICE', 'CPU').upper()
print(f"Running tests on {DEVICE}\n")
|
|
||
|
|
||
def ensureSlaveSynced(con, env):
    """Block until at least one replica has acknowledged all prior writes
    sent on this connection. No-op when the test env runs without slaves."""
    if not env.useSlaves:
        return
    # WAIT returns how many replicas acknowledged every preceding write
    # command issued on this connection, within the 1000 ms timeout.
    acked = con.execute_command('WAIT', '1', '1000')
    env.assertTrue(acked >= 1)
|
|
||
|
|
||
def check_cuda():
    """Return the exit status of `which nvcc`: 0 when the CUDA compiler is
    on PATH, non-zero otherwise."""
    status = os.system('which nvcc')
    return status
|
|
||
|
|
||
def info_to_dict(info):
    """Convert a flat INFO-style reply ([key, value, key, value, ...]) into
    a dict, decoding any bytes elements as ASCII.

    Uses isinstance() instead of `type(el) is bytes` so bytes subclasses
    are decoded too.
    """
    decoded = [el.decode('ascii') if isinstance(el, bytes) else el for el in info]
    # Pair even-index keys with odd-index values; zip truncates a trailing
    # unpaired key, matching the original slicing behavior.
    return dict(zip(decoded[::2], decoded[1::2]))
|
|
||
|
|
||
def load_mobilenet_test_data():
    """Load the mobilenet test fixtures from the local test_data directory.

    Returns a tuple of (frozen TF graph bytes, imagenet label index dict,
    224x224 float32 sample image).
    """
    data_dir = os.path.join(os.path.dirname(__file__), 'test_data')

    with open(os.path.join(data_dir, 'mobilenet_v2_1.4_224_frozen.pb'), 'rb') as pb_file:
        model_pb = pb_file.read()

    with open(os.path.join(data_dir, 'imagenet_class_index.json'), 'r') as labels_file:
        labels = json.load(labels_file)

    # Resize the sample image to the network's expected 224x224 input and
    # cast to float32 for AI.TENSORSET.
    side = 224
    img = imread(os.path.join(data_dir, 'panda.jpg'))
    img = resize(img, (side, side), mode='constant', anti_aliasing=True)
    img = img.astype(np.float32)

    return model_pb, labels, img
|
|
||
|
|
||
def run_mobilenet(con, img, input_var, output_var):
    """Store `img` as the 'input' tensor and run the 'mobilenet' model.

    Starts after a random 0-5 s delay so concurrent worker processes are
    staggered.

    NOTE(review): input_var/output_var are accepted but unused -- the
    tensor keys are hard-coded to 'input'/'output'; confirm with callers.
    """
    time.sleep(0.5 * random.randint(0, 10))

    tensorset_args = (
        'AI.TENSORSET', 'input',
        'FLOAT', 1, img.shape[1], img.shape[0], img.shape[2],
        'BLOB', img.tobytes(),
    )
    con.execute_command(*tensorset_args)
    con.execute_command('AI.MODELRUN', 'mobilenet',
                        'INPUTS', 'input', 'OUTPUTS', 'output')
|
|
||
|
|
||
def run_test_multiproc(env, n_procs, fn, args=tuple()):
    """Run `fn(con, *args)` in `n_procs` parallel processes and wait for
    all of them to finish. Each process opens its own fresh connection
    via `env.getConnection()`.
    """
    def tmpfn():
        # Executed in the child process; a per-process connection avoids
        # sharing a socket across forks.
        con = env.getConnection()
        fn(con, *args)
        return 1

    procs = []
    for _ in range(n_procs):
        proc = Process(target=tmpfn)
        proc.start()
        procs.append(proc)

    # Join with a plain loop: the original `[p.join() for p in procs]`
    # built a throwaway list purely for side effects.
    for proc in procs:
        proc.join()
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.