Python hmmlearn.hmm.MultinomialHMM() Examples

The following are 7 code examples showing how to use hmmlearn.hmm.MultinomialHMM(). The examples are extracted from open source projects; the project, author, source file, and license are listed above each example.

You may also want to check out the other available functions and classes of the hmmlearn.hmm module.
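
For orientation, here is a minimal standalone sketch of the API pattern the examples below share: construct the model, assign the probability matrices directly, and decode integer observation sequences. (In hmmlearn releases up to 0.2.x, MultinomialHMM implements a discrete, categorical emission distribution, which is how every example here uses it.) The matrices come from the Rainy/Sunny test case in Examples 4 and 5:

import numpy as np
from hmmlearn import hmm

# Two hidden states emitting one of three integer symbols per step.
model = hmm.MultinomialHMM(n_components=2)
model.startprob_ = np.array([0.6, 0.4])
model.transmat_ = np.array([[0.7, 0.3],
                            [0.4, 0.6]])
model.emissionprob_ = np.array([[0.1, 0.4, 0.5],
                                [0.6, 0.3, 0.1]])

# Observations are column vectors of symbol indices.
X = np.array([[0], [2], [1], [1], [2]])
states = model.predict(X)  # Viterbi state sequence
logprob = model.score(X)   # log-likelihood of X under the model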

Example 1
Project: deepbgc   Author: Merck   File: hmm.py    License: MIT License
def _construct_model(self, startprob, transmat, emissionprob, vocabulary):
        """
        Create internal HMM model with given matrices and store it to self.model_
        :param startprob: Starting probability [negative_starting_prob, positive_starting_prob]
        :param transmat: Transition matrix (an array where the [i][j]-th element is the probability of transitioning from the i-th to the j-th hidden state)
        :param emissionprob: Emission probability [[neg_pfam1, neg_pfam2, ...], [pos_pfam1, pos_pfam2, ...]] with pfam IDs indexed by their vocabulary index numbers
        :param vocabulary: Vocabulary dictionary with {pfam_id: index_number_in_emission}
        :return: self
        """
        try:
            from hmmlearn import hmm
        except ImportError:
            raise get_hmmlearn_import_error()
        self.model_ = hmm.MultinomialHMM(n_components=2)
        if isinstance(startprob, list):
            startprob = np.array(startprob)
        if isinstance(transmat, list):
            transmat = np.array(transmat)
        self.model_.startprob_ = startprob
        self.model_.transmat_ = transmat
        self.model_.emissionprob_ = emissionprob
        self.vocabulary_ = vocabulary
        return self 
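
A hypothetical usage sketch of the method above. The Pfam IDs and matrices are illustrative rather than deepbgc's trained values, and the sketch assumes _construct_model belongs to the DiscreteHMM class seen in Example 2:

import numpy as np

# Illustrative vocabulary and matrices (not deepbgc's trained values).
vocabulary = {'PF00001': 0, 'PF00002': 1, 'PF00003': 2}
detector = DiscreteHMM()  # assumed host class of _construct_model
detector._construct_model(
    startprob=[0.9, 0.1],
    transmat=[[0.99, 0.01], [0.05, 0.95]],
    emissionprob=np.array([[0.5, 0.3, 0.2], [0.1, 0.2, 0.7]]),
    vocabulary=vocabulary,
)

# Encode a Pfam sequence as emission indices, then decode states.
pfam_ids = ['PF00001', 'PF00003', 'PF00003']
X = np.array([[vocabulary[p] for p in pfam_ids]]).T
states = detector.model_.predict(X)  # 0 = negative, 1 = positive
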
Example 2
Project: deepbgc   Author: Merck   File: hmm.py    License: MIT License
def fit(self, X_list, y_list, startprob=None, transmat=None, verbose=1, debug_progress_path=None, validation_X_list=None, validation_y_list=None):
        if validation_X_list:
            logging.warning('GeneBorderHMM: Validation is present but has no effect yet')
        if verbose:
            logging.info('Training two state model...')

        two_state_model = DiscreteHMM()
        two_state_model.fit(X_list, y_list, startprob=startprob, transmat=transmat, verbose=verbose)

        emission, self.vocabulary_ = self._convert_emission(two_state_model.model_.emissionprob_, two_state_model.vocabulary_)

        from hmmlearn import hmm
        self.model_ = hmm.MultinomialHMM(n_components=4)
        self.model_.startprob_ = self._convert_startprob(startprob)
        self.model_.transmat_ = self._convert_transmat(transmat, X_list)
        self.model_.emissionprob_ = emission
        return self 
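
Example 2 assigns externally converted matrices to a four-state model; hmmlearn validates them only when the model is first used, requiring startprob_ to sum to 1 and every row of transmat_ to sum to 1. A standalone sketch of that contract with placeholder values (not deepbgc's converted matrices):

import numpy as np
from hmmlearn import hmm

model = hmm.MultinomialHMM(n_components=4)
model.startprob_ = np.array([0.45, 0.05, 0.45, 0.05])   # sums to 1
model.transmat_ = np.array([[0.90, 0.05, 0.04, 0.01],   # each row
                            [0.10, 0.80, 0.05, 0.05],   # sums to 1
                            [0.04, 0.01, 0.90, 0.05],
                            [0.05, 0.05, 0.10, 0.80]])
model.emissionprob_ = np.full((4, 3), 1 / 3)  # uniform over 3 symbols

# predict() triggers the validation and decodes a toy sequence.
states = model.predict(np.array([[0], [1], [2], [2]]))
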
Example 3
Project: crepe   Author: marl   File: core.py    License: MIT License
def to_viterbi_cents(salience):
    """
    Find the Viterbi path using a transition prior that induces pitch
    continuity.
    """
    from hmmlearn import hmm

    # uniform prior on the starting pitch
    starting = np.ones(360) / 360

    # transition probabilities inducing continuous pitch
    xx, yy = np.meshgrid(range(360), range(360))
    transition = np.maximum(12 - abs(xx - yy), 0)
    transition = transition / np.sum(transition, axis=1)[:, None]

    # emission probability = fixed probability for self, evenly distribute the
    # others
    self_emission = 0.1
    emission = (np.eye(360) * self_emission + np.ones(shape=(360, 360)) *
                ((1 - self_emission) / 360))

    # fix the model parameters because we are not optimizing the model
    # (the original positional call passed `starting` and `transition` as
    # Dirichlet priors, which are unused here since fit is never called)
    model = hmm.MultinomialHMM(n_components=360)
    model.startprob_, model.transmat_, model.emissionprob_ = \
        starting, transition, emission

    # find the Viterbi path
    observations = np.argmax(salience, axis=1)
    path = model.predict(observations.reshape(-1, 1), [len(observations)])

    return np.array([to_local_average_cents(salience[i, :], path[i]) for i in
                     range(len(observations))]) 
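
The transition prior above is a banded matrix: max(12 - |i - j|, 0) decays linearly with the distance between pitch bins and zeroes out jumps of 12 bins or more, which is what biases the Viterbi path toward continuous pitch. A scaled-down sketch of the same construction (5 bins, bandwidth 2), independent of crepe:

import numpy as np

n_bins, width = 5, 2
xx, yy = np.meshgrid(range(n_bins), range(n_bins))
transition = np.maximum(width - abs(xx - yy), 0).astype(float)
transition /= transition.sum(axis=1, keepdims=True)
print(transition)
# Each row is a triangular window centered on the diagonal; jumps of
# `width` bins or more get zero transition probability.
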
Example 4
Project: hmmlearn   Author: hmmlearn   File: test_multinomial_hmm.py    License: BSD 3-Clause "New" or "Revised" License
def setup_method(self, method):
        n_components = 2   # ['Rainy', 'Sunny']
        n_features = 3     # ['walk', 'shop', 'clean']
        self.h = hmm.MultinomialHMM(n_components)
        self.h.n_features = n_features
        self.h.startprob_ = np.array([0.6, 0.4])
        self.h.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
        self.h.emissionprob_ = np.array([[0.1, 0.4, 0.5],
                                         [0.6, 0.3, 0.1]]) 
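
The setup above leaves a fully specified model that can be queried without fitting; a sketch that reuses those matrices to decode the textbook observation sequence walk, shop, clean:

import numpy as np
from hmmlearn import hmm

h = hmm.MultinomialHMM(n_components=2)
h.n_features = 3
h.startprob_ = np.array([0.6, 0.4])
h.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
h.emissionprob_ = np.array([[0.1, 0.4, 0.5],
                            [0.6, 0.3, 0.1]])

X = np.array([[0], [1], [2]])  # walk, shop, clean
logprob, states = h.decode(X, algorithm="viterbi")  # joint log-prob + path
posteriors = h.predict_proba(X)                     # per-step state posteriors
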
Example 5
Project: hmmlearn   Author: hmmlearn   File: test_multinomial_hmm.py    License: BSD 3-Clause "New" or "Revised" License
def setup_method(self, method):
        self.n_components = 2
        self.n_features = 3
        self.h = hmm.MultinomialHMM(self.n_components)
        self.h.startprob_ = np.array([0.6, 0.4])
        self.h.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
        self.h.emissionprob_ = np.array([[0.1, 0.4, 0.5], [0.6, 0.3, 0.1]]) 
Example 6
Project: hmmlearn   Author: hmmlearn   File: test_multinomial_hmm.py    License: BSD 3-Clause "New" or "Revised" License
def test_fit_with_init(self, params='ste', n_iter=5):
        lengths = [10] * 10
        X, _state_sequence = self.h.sample(sum(lengths))

        # use _init to initialize parameters
        h = hmm.MultinomialHMM(self.n_components, params=params,
                               init_params=params)
        h._init(X, lengths=lengths)

        assert log_likelihood_increasing(h, X, lengths, n_iter) 
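
Example 6 calls the private _init hook directly so the test controls initialization before checking that EM monotonically increases the log-likelihood (log_likelihood_increasing is a hmmlearn test helper). The public entry point is fit, which performs initialization itself; a minimal sketch of the same round trip, sampling from the Rainy/Sunny model of Examples 4 and 5 and re-estimating it:

import numpy as np
from hmmlearn import hmm

# A known generating model using the matrices from Examples 4 and 5.
true_model = hmm.MultinomialHMM(n_components=2)
true_model.startprob_ = np.array([0.6, 0.4])
true_model.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
true_model.emissionprob_ = np.array([[0.1, 0.4, 0.5], [0.6, 0.3, 0.1]])

lengths = [10] * 10
X, _states = true_model.sample(sum(lengths))

# A fresh model: fit() initializes the 's', 't', 'e' parameters itself
# and then runs EM for up to n_iter iterations.
learned = hmm.MultinomialHMM(n_components=2, n_iter=5)
learned.fit(X, lengths=lengths)
print(learned.score(X, lengths=lengths))  # log-likelihood after EM
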
Example 7
Project: numpy-ml   Author: ddbourgin   File: hmm_plots.py    License: GNU General Public License v3.0
def test_HMM():
    np.random.seed(12345)
    np.set_printoptions(precision=5, suppress=True)

    P = default_hmm()
    ls, obs = P["latent_states"], P["obs_types"]

    # generate a new sequence
    O = generate_training_data(P, n_steps=30, n_examples=25)

    tol = 1e-5
    n_runs = 5
    best, best_theirs = (-np.inf, []), (-np.inf, [])
    for _ in range(n_runs):
        hmm = MultinomialHMM()
        A_, B_, pi_ = hmm.fit(O, ls, obs, tol=tol, verbose=True)

        # MHMM is hmmlearn's MultinomialHMM (the bare MultinomialHMM above
        # is numpy-ml's own implementation being compared against it)
        theirs = MHMM(
            tol=tol,
            verbose=True,
            n_iter=int(1e9),
            transmat_prior=1,
            startprob_prior=1,
            algorithm="viterbi",
            n_components=len(ls),
        )

        # concatenate all training sequences into a single (n_steps, 1) column
        O_flat = O.reshape(1, -1).flatten().reshape(-1, 1)
        theirs = theirs.fit(O_flat, lengths=[O.shape[1]] * O.shape[0])

        hmm2 = MultinomialHMM(A=A_, B=B_, pi=pi_)
        like = np.sum([hmm2.log_likelihood(obs) for obs in O])
        like_theirs = theirs.score(O_flat, lengths=[O.shape[1]] * O.shape[0])

        if like > best[0]:
            best = (like, {"A": A_, "B": B_, "pi": pi_})

        if like_theirs > best_theirs[0]:
            best_theirs = (
                like_theirs,
                {
                    "A": theirs.transmat_,
                    "B": theirs.emissionprob_,
                    "pi": theirs.startprob_,
                },
            )
    print("Final log likelihood of sequence: {:.5f}".format(best[0]))
    print("Final log likelihood of sequence (theirs): {:.5f}".format(best_theirs[0]))
    plot_matrices(P, best, best_theirs)
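
A note on the reshaping above: hmmlearn expects multiple sequences concatenated into one (n_total_steps, 1) column plus a per-sequence lengths list, which is what O_flat provides (for a C-contiguous array, O.reshape(1, -1).flatten().reshape(-1, 1) is equivalent to O.reshape(-1, 1)). A standalone illustration of the convention, with the symbol count as a placeholder:

import numpy as np

# 25 sequences of 30 steps each, mirroring generate_training_data above;
# the 3-symbol alphabet is a placeholder.
O = np.random.randint(0, 3, size=(25, 30))

O_flat = O.reshape(-1, 1)            # concatenate row-wise
lengths = [O.shape[1]] * O.shape[0]  # 25 entries of 30
assert O_flat.shape == (sum(lengths), 1)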