Commit 073a1833 authored by Martino Bertoni

added method to submit a test job, which checks the python version, tensorflow and faiss

parent 18ad1334
@@ -4,6 +4,9 @@ Allow the initialization of any of the defined queueing systems.
Provide a shared interface to get job status, check log for errors, and
compress log output.
"""
import os
import shutil
import pathlib
from .sge import sge
from .slurm import slurm
from .local import local
@@ -120,3 +123,44 @@ class HPC():
        * ``ready``: Job finished without errors
        """
        return self.hpc.status()
    @classmethod
    def test_job(cls, job_path, config=None, params=None):
        """Submit a test job checking the python version, tensorflow and faiss.

        Args:
            job_path (str): Existing directory where the job will run.
            config (Config): Chemical Checker config; defaults to ``Config()``.
            params (dict): HPC submission parameters; missing keys fall back
                to the defaults (single job, 1 CPU, wait for completion).
        """
        from chemicalchecker import Config
        if config is None:
            config = Config()
        cluster = cls.from_config(config)
        cc_config_path = config.config_path
        cc_package = os.path.join(config.PATH.CC_REPO, 'package')
        singularity_image = config.PATH.SINGULARITY_IMAGE
        # copy the bundled test script into the job directory
        hpc_path = pathlib.Path(__file__).parent.absolute()
        script_name_src = os.path.join(hpc_path, 'test_script.py')
        script_name_dst = os.path.join(job_path, 'test_script.py')
        shutil.copy(script_name_src, script_name_dst)
        # run the script with python inside the singularity image, exposing
        # the CC package and config file via SINGULARITYENV_* variables
        command = ' '.join([
            "SINGULARITYENV_PYTHONPATH={}",
            "SINGULARITYENV_CC_CONFIG={}",
            "singularity exec {}",
            "python {}"
        ])
        command = command.format(
            cc_package,
            cc_config_path,
            singularity_image,
            script_name_dst)
        print('CMD: {}'.format(command))
        # default submission parameters, overridden by user-provided params
        def_params = {}
        def_params["num_jobs"] = 1
        def_params["jobdir"] = job_path
        def_params["job_name"] = "TEST"
        def_params["wait"] = True
        def_params["cpu"] = 1
        if params is None:
            params = def_params
        else:
            for k, v in def_params.items():
                if k not in params:
                    params[k] = v
        for k, v in params.items():
            cls.__log.debug('{:<20} : {}'.format(k, v))
        job = cluster.submitMultiJob(command, **params)
        return job
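For reference, a minimal usage sketch of the new method (the import path, config and job directory below are assumptions, not part of this commit):

# hypothetical example: submit the test job from an interactive session
from chemicalchecker import Config
from chemicalchecker.util.hpc import HPC  # import path assumed
cfg = Config()
HPC.test_job('/tmp/hpc_test', config=cfg)  # '/tmp/hpc_test' must already exist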
test_script.py 0 → 100644
import logging
import chemicalchecker
# make sure INFO messages reach the job log (root logger defaults to WARNING)
logging.basicConfig(level=logging.INFO)
logging.log(logging.INFO, '{:<10} : {}'.format(
    'chemicalchecker', chemicalchecker.__path__))
logging.log(logging.INFO, '********** TEST START **********')
# print system info
import os
import sys
logging.log(logging.INFO, '{:<10} : {}'.format(
    'python', os.path.dirname(sys.executable)))
logging.log(logging.INFO, sys.version)
# print node info
import platform
for k, v in platform.uname()._asdict().items():
    logging.log(logging.INFO, '{:<10} : {}'.format(k, v))
# test tensorflow
import tensorflow as tf
logging.log(logging.INFO, '{:<10} : {}'.format('tensorflow', tf.__version__))
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10)
])
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer='adam',
              loss=loss_fn,
              metrics=['accuracy'])
model.fit(x_train, y_train, epochs=5)
# test faiss
import faiss
import numpy as np
logging.log(logging.INFO, '{:<10} : {}'.format('faiss', faiss.__version__))
logging.log(logging.INFO, '{:<10} : {}'.format('numpy', np.__version__))
faiss.Kmeans(10, 20).train(np.random.rand(1000, 10).astype(np.float32))
logging.log(logging.INFO, '********** TEST DONE **********')
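The MNIST test split is loaded above but never used; if a quick held-out sanity check were wanted, an evaluation step could be appended to the script, for example (a sketch reusing the variables already defined above):

# optional follow-up: evaluate the trained model on the held-out MNIST split
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=2)
logging.log(logging.INFO, '{:<10} : {}'.format('mnist acc', test_acc))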