Python Examples

The following are 30 code examples showing how to use ``. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and follow the links above each example to the original project or source file.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions and classes of the `` module, or try the search function.

Example 1
Project: KAIR   Author: cszn   File:    License: MIT License 14 votes vote down vote up
def cal_pca_matrix(path='PCA_matrix.mat', ksize=15, l_max=12.0, dim_pca=15, num_samples=500):
    kernels = np.zeros([ksize*ksize, num_samples], dtype=np.float32)
    for i in range(num_samples):

        theta = np.pi*np.random.rand(1)
        l1    = 0.1+l_max*np.random.rand(1)
        l2    = 0.1+(l1-0.1)*np.random.rand(1)

        k = anisotropic_Gaussian(ksize=ksize, theta=theta[0], l1=l1[0], l2=l2[0])

        # util.imshow(k)

        kernels[:, i] = np.reshape(k, (-1), order="F")  # k.flatten(order='F')

    # io.savemat('k.mat', {'k': kernels})

    pca_matrix = get_pca_matrix(kernels, dim_pca=dim_pca)

    io.savemat(path, {'p': pca_matrix})

    return pca_matrix 
Example 2
Project: DCC   Author: shahsohil   File:    License: MIT License 7 votes vote down vote up
def make_mnist_data(path, isconv=False):
    X, Y = load_mnist(path, True)
    X = X.astype(np.float64)
    X2, Y2 = load_mnist(path, False)
    X2 = X2.astype(np.float64)
    X3 = np.concatenate((X, X2), axis=0)

    minmaxscale = MinMaxScaler().fit(X3)

    X = minmaxscale.transform(X)
    if isconv:
        X = X.reshape((-1, 1, 28, 28))

    sio.savemat(osp.join(path, 'traindata.mat'), {'X': X, 'Y': Y})

    X2 = minmaxscale.transform(X2)
    if isconv:
        X2 = X2.reshape((-1, 1, 28, 28))

    sio.savemat(osp.join(path, 'testdata.mat'), {'X': X2, 'Y': Y2}) 
Example 3
Project: laplacian-meshes   Author: bmershon   File:    License: GNU General Public License v3.0 6 votes vote down vote up
def doLaplacianSolveWithConstraints(self, evt):
        """Solve for new mesh vertex positions under soft Laplacian anchor
        constraints (IGL cotangent weights) and dump debug data to anchors.mat.

        evt - GUI event object; unused beyond the callback signature.
        """
        # A very large weight makes the soft anchors behave as near-hard constraints.
        anchorWeights = 1e8
        anchors = np.zeros((len(self.laplacianConstraints), 3))
        i = 0
        anchorsIdx = []
        # NOTE(review): anchorsIdx is never populated below, so the solver
        # receives an empty index list while `anchors` holds target positions.
        # A line such as `anchorsIdx.append(anchor)` appears to have been
        # lost — confirm against the upstream laplacian-meshes project.
        for anchor in self.laplacianConstraints:
            anchors[i, :] = self.laplacianConstraints[anchor]
            i += 1
        #IGL Cotangent weights
        (L, M_inv, solver, deltaCoords) = makeLaplacianMatrixSolverIGLSoft(self.mesh.VPos, self.mesh.ITris, anchorsIdx, anchorWeights)
        self.mesh.VPos = solveLaplacianMatrixIGLSoft(solver, L, M_inv, deltaCoords, anchorsIdx, anchors, anchorWeights)
#        #My umbrella weights
#        L = makeLaplacianMatrixUmbrellaWeights(self.mesh.VPos, self.mesh.ITris, anchorsIdx, anchorWeights)
#        deltaCoords =[0:self.mesh.VPos.shape[0], :]
#        self.mesh.VPos = np.array(solveLaplacianMatrix(L, deltaCoords, anchors, anchorWeights), dtype=np.float32)
        # Persist the solve inputs/outputs for offline inspection in MATLAB.
        sio.savemat("anchors.mat", {'deltaCoords':deltaCoords, 'anchors':anchors, 'anchorsIdx':np.array(anchorsIdx)})
        self.mesh.needsDisplayUpdate = True
Example 4
Project: DeepHash   Author: thulab   File:    License: MIT License 6 votes vote down vote up
def precision_recall(params):
    database_code = np.array(params['database_code'])
    validation_code = np.array(params['validation_code'])
    database_labels = np.array(params['database_labels'])
    validation_labels = np.array(params['validation_labels'])
    database_code = np.sign(database_code)
    validation_code = np.sign(validation_code)

    sim =, validation_code.T)
    ids = np.argsort(-sim, axis=0)
    ones = np.ones((ids.shape[0], ids.shape[1]),
    ids = ids + ones
    mat_ids = dict(
    scio.savemat('./data/data.mat', mat_ids) 
Example 5
Project: Computable   Author: ktraunmueller   File:    License: MIT License 6 votes vote down vote up
def bench_run():
    str_io = BytesIO()
    print('Read / writing matlab structs')
    print(' write |  read |   vars | fields | structs | compressed')
    for n_vars, n_fields, n_structs in (
        (10, 10, 20), (20, 20, 40), (30, 30, 50)):
        var_dict = make_structarr(n_vars, n_fields, n_structs)
        for compression in (False, True):
            str_io = BytesIO()
            write_time = measure('sio.savemat(str_io, var_dict, do_compression=%r)' % compression)
            read_time = measure('sio.loadmat(str_io)')
            print('%.5f | %.5f | %5d | %5d | %5d | %r' % (
Example 6
Project: Iris-Recognition   Author: thuyngch   File:    License: MIT License 6 votes vote down vote up
def createAccount(template, mask, name, exinfo):
		Create an account in database based on extracted feature, and some
		extra information from the enroller.

		template 	- Extracted template from the iris image
		mask		- Extracted mask from the iris image
		name		- Name of the enroller
		exinfo		- Extra information of the enroller
	# Get file name for the account
	files = []
	for file in os.listdir(temp_database_path):
	    if file.endswith(".mat"):
	filename = str(len(files) + 1)

	# Save the file
	sio.savemat(temp_database_path + filename + '.mat',	\
		mdict={'template':template, 'mask':mask,\
		'name':name, 'exinfo':exinfo}) 
Example 7
Project: bonnet   Author: PRBonn   File:    License: GNU General Public License v3.0 6 votes vote down vote up
def predict_probs(img, net, FLAGS, DATA):
  # open image
  cvim = cv2.imread(img, cv2.IMREAD_UNCHANGED)
  if cvim is None:
    print("No image to open for ", img)
  # predict mask from image
  start = time.time()
  probs = net.predict(cvim, path=FLAGS.path + '/' +
                      FLAGS.model, verbose=FLAGS.verbose, as_probs=True)
  print("Prediction for img ", img, ". Elapsed: ", time.time() - start, "s")

  # save to matlab matrix
  matname = FLAGS.log + "/" + \
      os.path.splitext(os.path.basename(img))[0] + ".mat"
  sio.savemat(matname, {'p': probs})

Example 8
Project: SegMitos_mitosis_detection   Author: ChaoLi977   File:    License: MIT License 6 votes vote down vote up
def stitchPatch(root_folder, dir1, imgname, featfolder, savefolder):  
    # stitch the features of patches to feature of full image
	name = os.path.join(dir1, imgname)
	print 'name:%s\n' %(name)
	Im = os.path.join(featfolder, name[0:-4])
	I = [None]*16
	for i in range(9):
		dict1 = sio.loadmat(Im+'_0'+str(i+1)+'.mat')
		I[i] = dict1['feat']
	for i in range(9,16):
		dict2 = sio.loadmat(Im+'_'+str(i+1)+'.mat')
		I[i] = dict2['feat']
	A = np.zeros((4*500,4*500))
	for row in range(4):
		for col in range(4):
			A[row*500:(row+1)*500,col*500:(col+1)*500] = I[row*4+col]
	sio.savemat(savefolder+name[0:-4], {'A':np.mat(A)}) 
Example 9
def get_feature():
    inputs = tf.placeholder("float", [None, 64, 64, 1])
    is_training = tf.placeholder("bool")
    _, feature = googlenet(inputs, is_training)
    feature = tf.squeeze(feature, [1, 2])
    sess = tf.Session()
    saver = tf.train.Saver()
    data = sio.loadmat("../data/dataset.mat")
    testdata = data["test"] / 127.5 - 1.0
    testlabels = data["testlabels"]
    saver.restore(sess, "../save_para/.\\model.ckpt")
    nums_test = testdata.shape[0]
    FEATURE = np.zeros([nums_test, 1024])
    for i in range(nums_test // BATCH_SIZE):
        FEATURE[i * BATCH_SIZE:i * BATCH_SIZE + BATCH_SIZE] =, feed_dict={inputs: testdata[i * BATCH_SIZE:i * BATCH_SIZE + BATCH_SIZE], is_training: False})
    FEATURE[(nums_test // BATCH_SIZE - 1) * BATCH_SIZE + BATCH_SIZE:] =, feed_dict={inputs: testdata[(nums_test // BATCH_SIZE - 1) * BATCH_SIZE + BATCH_SIZE:], is_training: False})
    sio.savemat("../data/feature.mat", {"feature": FEATURE, "testlabels": testlabels}) 
Example 10
def tsne():
    data = sio.loadmat("../data/feature.mat")
    feature_test = data["feature"]
    proj = TSNE().fit_transform(feature_test)
    sio.savemat("../data/proj.mat", {"proj": proj}) 
Example 11
Project: me-ica   Author: ME-ICA   File:    License: GNU Lesser General Public License v2.1 5 votes vote down vote up
def to_file_map(self, file_map=None):
        ''' Write image to `file_map` or contained ``self.file_map``

        Extends Analyze ``to_file_map`` method by writing ``mat`` file

        file_map : None or mapping, optional
           files mapping.  If None (default) use object's ``file_map``
           attribute instead
        if file_map is None:
            file_map = self.file_map
        super(Spm99AnalyzeImage, self).to_file_map(file_map)
        mat = self._affine
        if mat is None:
        import as sio
        hdr = self._header
        if hdr.default_x_flip:
            M =[-1, 1, 1, 1]), mat)
            M = mat
        # Adjust for matlab 1,1,1 voxel origin
        from_111 = np.eye(4)
        from_111[:3,3] = -1
        M =, from_111)
        mat =, from_111)
        # use matlab 4 format to allow gzipped write without error
        mfobj = file_map['mat'].get_prepare_fileobj(mode='wb')
        sio.savemat(mfobj, {'M': M, 'mat': mat}, format='4')
        if file_map['mat'].filename is not None: # was filename
Example 12
Project: hdidx   Author: hdidx   File:    License: MIT License 5 votes vote down vote up
def create_random_data(ntrain=10**4, nbase=10**4, nquery=10**2):
    Create random data
    # synthetic dataset
    vtrain, vbase, vquery, ids_gnd = load_random(ntrain, nbase, nquery)
    spio.savemat('./test-tmp/hdidx_test_vbase.mat', {'feat': vbase[:10, :]})

    return np.require(vtrain, np.single, requirements="C"),\
        np.require(vbase, np.single, requirements="C"),    \
        np.require(vquery, np.single, requirements="C"),   \
Example 13
Project: DCC   Author: shahsohil   File:    License: MIT License 5 votes vote down vote up
def save_misc_data(path, X, Y, N):
    threshold_index = int(N * 4/5)
    sio.savemat(osp.join(path, 'traindata.mat'), {'X': X[:threshold_index], 'Y': Y[:threshold_index]})
    sio.savemat(osp.join(path, 'testdata.mat'), {'X': X[threshold_index:], 'Y': Y[threshold_index:]}) 
Example 14
Project: DCC   Author: shahsohil   File:    License: MIT License 5 votes vote down vote up
def compressed_data(dataset, n_samples, k, preprocess=None, algo='mknn', isPCA=None, format='mat'):
    datadir = get_data_dir(dataset)
    if format == 'pkl':
        labels, features = load_train_and_validation(load_data, datadir, n_samples)
    elif format == 'h5':
        labels, features = load_train_and_validation(load_data_h5py, datadir, n_samples)
        labels, features = load_train_and_validation(load_matdata, datadir, n_samples)

    features = feature_transformation(features, preprocessing=preprocess)

    # PCA is computed for Text dataset. Please refer RCC paper for exact details.
    features1 = features.copy()
    if isPCA is not None:
        pca = PCA(n_components=isPCA, svd_solver='full').fit(features)
        features1 = pca.transform(features)

    t0 = time()

    if algo == 'knn':
        weights = kNN(features1, k=k, measure='euclidean')
        weights = mkNN(features1, k=k, measure='cosine')

    print('The time taken for edge set computation is {}'.format(time() - t0))

    filepath = os.path.join(datadir, 'pretrained')
    if format == 'h5':
        import h5py
        fo = h5py.File(filepath + '.h5', 'w')
        fo.create_dataset('X', data=features)
        fo.create_dataset('w', data=weights[:, :2])
        fo.create_dataset('gtlabels', data=labels)
        sio.savemat(filepath + '.mat', mdict={'X': features, 'w': weights[:, :2], 'gtlabels': labels}) 
Example 15
Project: adversarial-attacks   Author: hmph   File:    License: MIT License 5 votes vote down vote up
def SavePredictionScores(pred_scores, adv_scores, im_height, im_width, args, is_debug=False):
    """Saves the outputs of the network in a mat file."""

    pred_scores = softmax(pred_scores)
    adv_scores  = softmax(adv_scores)

    conf = pred_scores.max(axis = 0)
    adv_conf = adv_scores.max(axis = 0)

    entropy_map = entropy(pred_scores)
    conf_ratio_map = conf_ratio(pred_scores)

    adv_entropy_map = entropy(adv_scores)
    adv_conf_ratio_map = conf_ratio(adv_scores)

    model_name = args.model_name
    image_name = os.path.basename(args.image).split('.')[0]
    save_name = os.path.join(
        args.out_dir, "{}_scores_{}_eps={}.mat".format(image_name, model_name, args.eps))

    if not is_debug:
        sio.savemat(save_name, {'conf': conf, 'adv_conf': adv_conf, 'im_height' : im_height, 'im_width': im_width, 'entropy': entropy_map, 'conf_ratio': conf_ratio_map, 'adv_entropy': adv_entropy_map, 'adv_conf_ratio': adv_conf_ratio_map}, do_compression=True)
        sio.savemat(save_name, {'unary': pred_scores, 'unary_adv': adv_scores, 'conf': conf, 'adv_conf': adv_conf, 'im_height' : im_height, 'im_width': im_width, 'entropy': entropy_map, 'conf_ratio': conf_ratio_map, 'adv_entropy': adv_entropy_map, 'adv_conf_ratio': adv_conf_ratio_map}, do_compression=True)
    return conf 
Example 16
Project: gpkit   Author: convexengineering   File:    License: MIT License 5 votes vote down vote up
def savemat(self, filename="solution.mat", showvars=None,
                excluded=("unnecessary lineage", "vec")):
        "Saves primal solution as matlab file"
        from import savemat
                {name.replace(".", "_"): np.array(self["variables"][key], "f")
                 for name, key in self.varnames(showvars, excluded).items()}) 
Example 17
Project: spyder-kernels   Author: spyder-ide   File:    License: MIT License 5 votes vote down vote up
def save_matlab(data, filename):
                spio.savemat(filename, data, oned_as='row')
            except Exception as error:
                return str(error) 
Example 18
Project: UNet-Zoo   Author: shreyaspadhy   File:    License: MIT License 5 votes vote down vote up
def __init__(self, dataset_folder, train=True, keywords=["P1", "1", "flair"], im_size=[128, 128], transform=None):

        self.__file = []
        self.__im = []
        self.__mask = []
        self.im_ht = im_size[0]
        self.im_wd = im_size[1]
        self.transform = transform

        folder = dataset_folder
        # # Open and load text file including the whole training data
        if train:
            folder = dataset_folder + "Train/"
            folder = dataset_folder + "Test/"

        for file in os.listdir(folder):
            if file.endswith(".png"):
                filename = os.path.splitext(file)[0]
                filename_fragments = filename.split("_")
                samekeywords = list(set(filename_fragments) & set(keywords))
                if len(samekeywords) == len(keywords):
                    # 1. read file name
                    # 2. read raw image
                    # TODO: I think we should open image only in getitem,
                    # otherwise memory explodes

                    # rawImage = getImg(folder + file)
                    self.__im.append(folder + file)
                    # 3. read mask image
                    mask_file = getMaskFileName(file)
                    # maskImage = getImg(folder + mask_file)
                    self.__mask.append(folder + mask_file)
        # self.dataset_size = len(self.__file)

        # print("lengths : ", len(self.__im), len(self.__mask))
        self.dataset_size = len(self.__file)

        if not train:
            sio.savemat('filelist2.mat', {'data': self.__im}) 
Example 19
Project: aletheia   Author: daniellerch   File:    License: MIT License 5 votes vote down vote up
def predict_proba(self, X):
        """Predict per-sample class probabilities for feature matrix X by
        running the serialized Octave ensemble classifier in a subprocess.

        Returns a list of [P(class0), P(class1)] pairs, one per row of X.

        NOTE(review): this snippet lost several lines to scraping; the
        temp-dir/path setup, the octave addpath, the readlines, and the
        vote-to-probability conversion below are reconstructed from context
        — verify against the upstream aletheia source before trusting.
        """
        currdir = os.path.dirname(os.path.abspath(__file__))
        basedir=os.path.abspath(os.path.join(currdir, os.pardir))
        m_path=os.path.join(basedir, 'external', 'octave')

        # Working files exchanged with the Octave process.
        self.__tmpdir = tempfile.mkdtemp()
        path = os.path.join(self.__tmpdir, 'F.mat')       # features in
        pclf = os.path.join(self.__tmpdir, 'clf.mat')     # classifier in
        pvotes = os.path.join(self.__tmpdir, 'votes.txt') # votes out

        #savemat(path, mdict={'F': numpy.array(X)}, oned_as='column')
        hdf5storage.write({u'F': numpy.array(X)}, '.', path, matlab_compatible=True)

        savemat(pclf, self.__mat_clf)

        m_code = ""
        m_code+="addpath('"+m_path+"');"
        m_code+="cd "+self.__tmpdir+";"
        m_code+="ensemble_predict('"+pclf+"', '"+path+"', '"+pvotes+"');"
        p=subprocess.Popen(M_BIN+" \""+m_code+"\"", stdout=subprocess.PIPE, shell=True)
        #output, err = p.communicate()
        status = p.wait()

        with open(pvotes, 'r') as f:
            lines = f.readlines()

        prob = []
        for l in lines:
            # NOTE(review): normalization of the raw vote value is unknown
            # from this snippet — confirm against upstream.
            votes = float(l)
            prob.append( [1-votes, votes] )

        return prob
Example 20
Project: aletheia   Author: daniellerch   File:    License: MIT License 5 votes vote down vote up
def save(self, path):
        savemat(path, self.__mat_clf, appendmat=False) 
Example 21
Project: TCFPN-ISBA   Author: Zephyr-D   File:    License: MIT License 5 votes vote down vote up
def save_predictions(dir_out, y_pred, y_truth, idx_task, experiment_name=""):
    if experiment_name != "":
        dir_out += "/{}/".format(experiment_name)
    # Make sure fiolder exists
    if not os.path.isdir(dir_out):

    truth_test_all_out = {"t{}_{}".format(idx_task, k): v for (k, v) in enumerate(y_truth)}
    predict_test_all_out = {"t{}_{}".format(idx_task, k): v for k, v in enumerate(y_pred)}
    sio.savemat(dir_out + "/{}_truth.mat".format(idx_task), truth_test_all_out)
    sio.savemat(dir_out + "/{}_predict.mat".format(idx_task), predict_test_all_out)

# ------------- Vision ------------- 
Example 22
Project: ZSL2018_Zero_Shot_Learning   Author: KaiJin1995   File:    License: MIT License 5 votes vote down vote up
def trainFeature(model, train_loader, device):
    feats = torch.empty(len(train_loader.dataset), 2048)
    labels = torch.empty(len(train_loader.dataset), 1)
    with torch.no_grad():
        for batch_idx, sample in enumerate(train_loader):
            data = sample[0].to(device)
            label = torch.from_numpy(np.array(sample[1])).to(device)
            cnn_feat = model(data)[0]
            feats[batch_idx*64:(batch_idx+1)*64, :] = cnn_feat
            labels[batch_idx*64:(batch_idx+1)*64, 0] = label
        sio_content = {"features":feats.numpy(), "label":labels.numpy()}

        sio.savemat("/home/xd133/ZJL_Fusai/Feature_1029/train.mat", sio_content) 
Example 23
Project: ZSL2018_Zero_Shot_Learning   Author: KaiJin1995   File:    License: MIT License 5 votes vote down vote up
def testFeature(model, test_loader, device):
    feats = torch.empty(len(test_loader.dataset), 2048)

    with torch.no_grad():
        for batch_idx, sample in enumerate(test_loader):
            data = sample[0].to(device)
            cnn_feat = model(data)[0]
            feats[batch_idx*64:(batch_idx+1)*64, :] = cnn_feat

            print("the batchidx is %d" % batch_idx)
        sio_content = {"features":feats.numpy()}

        sio.savemat("/home/xd133/ZJL_Fusai/Feature_1029/test.mat", sio_content) 
Example 24
Project: mmfashion   Author: open-mmlab   File:    License: Apache License 2.0 5 votes vote down vote up
def extract_features(image_set, cfg, save_feature_dir):

    model = build_retriever(cfg.model)
    print('model built')
    model = MMDataParallel(model, device_ids=cfg.gpus.test).cuda()

    embeds = _process_embeds(image_set, model, cfg)

    if not os.path.exists(save_feature_dir):
    save_path = os.path.join(save_feature_dir, 'extracted_features.mat')

    sio.savemat(save_path, {'embeds': embeds})
    print('extracted features saved to : %s' % save_path) 
Example 25
Project: Counting-ICCV-DSSINet   Author: Legion56   File:    License: MIT License 5 votes vote down vote up
def save_density_raw(density_map, output_dir, fname='results.mat'):
    scio.savemat(os.path.join(output_dir, fname), {'data': density_map}) 
Example 26
Project: hover_net   Author: vqdang   File:    License: MIT License 5 votes vote down vote up
def run(self):

        if self.inf_auto_find_chkpt:
            print('-----Auto Selecting Checkpoint Basing On "%s" Through "%s" Comparison' % \
                        (self.inf_auto_metric, self.inf_auto_comparator))
            model_path, stat = get_best_chkpts(self.save_dir, self.inf_auto_metric, self.inf_auto_comparator)
            print('Selecting: %s' % model_path)
            print('Having Following Statistics:')
            for key, value in stat.items():
                print('\t%s: %s' % (key, value))
            model_path = self.inf_model_path

        model_constructor = self.get_model()
        pred_config = PredictConfig(
            model        = model_constructor(),
            session_init = get_model_loader(model_path),
            input_names  = self.eval_inf_input_tensor_names,
            output_names = self.eval_inf_output_tensor_names)
        predictor = OfflinePredictor(pred_config)

        save_dir = self.inf_output_dir
        file_list = glob.glob('%s/*%s' % (self.inf_data_dir, self.inf_imgs_ext))
        file_list.sort() # ensure same order

        for filename in file_list:
            filename = os.path.basename(filename)
            basename = filename.split('.')[0]
            print(self.inf_data_dir, basename, end=' ', flush=True)

            img = cv2.imread(self.inf_data_dir + filename)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

            pred_map = self.__gen_prediction(img, predictor)
            sio.savemat('%s/%s.mat' % (save_dir, basename), {'result':[pred_map]})

Example 27
Project: DAMDNet   Author: LeiJiangJNU   File:    License: Apache License 2.0 5 votes vote down vote up
def dump_vertex(vertex, wfp):
    sio.savemat(wfp, {'vertex': vertex})
    print('Dump to {}'.format(wfp)) 
Example 28
Project: DAMDNet   Author: LeiJiangJNU   File:    License: Apache License 2.0 5 votes vote down vote up
def triDelaunay(pts68):
    # NOTE(review): this snippet is truncated/corrupted by scraping — `fig`
    # and `tri` are referenced but never defined (presumably a
    # `fig = plt.figure()` and a Delaunay triangulation of the landmark
    # points were dropped), and the plot_trisurf call below is cut off
    # mid-argument-list. Restore from the upstream DAMDNet source before use.
    ax=fig.add_subplot(1, 1, 1, projection='3d')
    # Plots the 68 3-D facial landmarks (rows presumably x, y, z — confirm)
    # as a triangulated surface.
    ax.plot_trisurf(pts68[0], pts68[1], pts68[2], triangles=tri.simplices,
Example 29
Project: SegMitos_mitosis_detection   Author: ChaoLi977   File:    License: MIT License 5 votes vote down vote up
def compCentroid_detect1(fcn, savefolder):
	"""Compute score-weighted centroids of detected mitosis blobs.

	fcn        - path to a .mat file holding a 2-D score map under key 'A'
	savefolder - prefix the result file name is appended to

	The score map is smoothed, Otsu-thresholded and connected-component
	labelled; each blob's weighted centroid, pixel area and mean score are
	saved to '<savefolder><last 9 chars of fcn>'.
	"""
	data_dict = sio.loadmat(fcn)
	# Smooth the raw score map with a small Gaussian before thresholding.
	f = matlab_style_gauss2D((10,10),0.25)
	A = cv2.filter2D(data_dict['A'], -1, f)

	level = threshold_otsu(A) #otsu threshold of image
	bw = A > level #binary image
	# NOTE(review): the positional 8 presumably requests 8-connectivity;
	# newer skimage versions expect `connectivity=` instead — confirm.
	L,num = label(bw,8,return_num=True) #label  the segmented blobs
	#import pdb;pdb.set_trace()
	plot_x = np.zeros((num, 1)) # location of centroid
	plot_y = np.zeros((num, 1))

	# Per-blob accumulators: score-weighted coordinate sums, pixel counts
	# and total scores. Labels are 1-based, the arrays 0-based (N-1 below).
	sum_x = np.zeros((num, 1))
	sum_y = np.zeros((num, 1))
	area = np.zeros((num, 1))
	score = np.zeros((num, 1))

	height,width = bw.shape[0], bw.shape[1]
	for i in range(height):
		for j in range(width):
			if L[i,j] != 0:
				N = L[i,j]
				sum_x[N-1] = sum_x[N-1]+i*A[i,j]
				sum_y[N-1] = sum_y[N-1]+j*A[i,j]
				area[N-1] = area[N-1] + 1
				score[N-1] = score[N-1] + A[i,j]

	# Weighted centroid = score-weighted mean pixel coordinate, rounded;
	# blob score becomes the mean score over its pixels.
	plot_x = np.around(sum_x*1.0/score)
	plot_y = np.around(sum_y*1.0/score)
	score = score*1.0/area
	centroid = np.zeros((num,2))
	for row in range(num):
		centroid[row,0] = plot_x[row,0]
		centroid[row,1] = plot_y[row,0]
	#centroid = np.mat(centroid)
	# Output name: savefolder + last 9 characters of the input path
	# (assumes fixed-width input file names — confirm).
	savefile = savefolder + fcn[-9:]
	sio.savemat(savefile,{'centroid':centroid, 'area':area, 'score':score})
Example 30
Project: sem   Author: signetlabdei   File:    License: GNU General Public License v2.0 5 votes vote down vote up
def save_to_mat_file(self, parameter_space,
                         filename, runs):
        """
        Return the results relative to the desired parameter space in the form
        of a .mat file.

            parameter_space (dict): dictionary containing
                parameter/list-of-values pairs.
            result_parsing_function (function): user-defined function, taking a
                result dictionary as argument, that can be used to parse the
                result files and return a list of values.
            filename (path): name of output .mat file.
            runs (int): number of runs to gather for each parameter
        """
        # NOTE(review): this snippet is corrupted by scraping — the docstring
        # had lost its quotes (restored above; it also documents a
        # result_parsing_function argument absent from this signature), the
        # code gathering results after the "parameter names" comment is
        # missing, and the final `return savemat(` call is truncated.
        # Restore from the upstream sem project before use.

        # Make sure all values are lists
        for key in parameter_space:
            if not isinstance(parameter_space[key], list):
                parameter_space[key] = [parameter_space[key]]

        # Add a dimension label for each non-singular dimension
        dimension_labels = [{key: str(parameter_space[key])} for key in
                            parameter_space.keys() if len(parameter_space[key])
                            > 1] + [{'runs': range(runs)}]

        # Create a list of the parameter names

        return savemat(
             'dimension_labels': dimension_labels})