suppressPackageStartupMessages({
    library(gmmase)
    library(igraph)
})

set.seed(123)
# two-block connection-probability matrix: dense within blocks, sparse between them
pm <- cbind(c(0.2, 0.001), c(0.001, 0.3))
# directed SBM on 1000 vertices: 300 in block 1, 700 in block 2
g <- sample_sbm(1000, pref.matrix = pm, block.sizes = c(300, 700), directed = TRUE)
E(g)$weight <- stats::runif(ecount(g), 1, 5) # add random edge weights on [1, 5]
summary(g)
## IGRAPH c497a1d D-W- 1000 164902 -- Stochastic block-model
## + attr: name (g/c), loops (g/l), weight (e/n)
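# --- Optional sanity check (a minimal sketch, not part of the gmmase demo) ------
# The block labels below are reconstructed from block.sizes = c(300, 700); this
# only verifies that the simulated graph looks like the SBM we asked for.
block <- rep(1:2, times = c(300, 700))
A <- as_adjacency_matrix(g, sparse = FALSE)  # binary adjacency; edge weights ignored
phat <- sapply(1:2, function(j) sapply(1:2, function(i) mean(A[block == i, block == j])))
round(phat, 3)                               # empirical rates should sit near pm
summary(E(g)$weight)                         # weights were drawn uniformly on [1, 5]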
# full pipeline: lcc -> pass-to-rank -> ASE (dmax = 20) -> elbow -> GMM clustering
Y <- gmmase(g, dmax=20, verbose=FALSE, doplot=TRUE)
## 1. Finding an lcc...
## IGRAPH f9a9a57 D-W- 1000 164902 -- Stochastic block-model
## + attr: name (g/c), loops (g/l), weight (e/n)
## 2. Passing-to-rank...
## IGRAPH f9a9a57 D-W- 1000 164902 -- Stochastic block-model
## + attr: name (g/c), loops (g/l), weight (e/n)
## 3. Embedding the graph into dmax = 20...
## 4. Finding an elbow (dimension reduction)...

## , use dhat =  2 
## 5. Clustering vertices..., Khat =  7

## ----------------------------------------------------
## Gaussian finite mixture model fitted by EM algorithm 
## ----------------------------------------------------
## 
## Mclust VVE (ellipsoidal, equal orientation) model with 7 components:
## 
##  log.likelihood    n df      BIC      ICL
##        12400.19 1000 68 24330.65 24237.56
## 
## Clustering table:
##   1   2   3   4   5   6   7 
##  72  59  88  81 123 143 434
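# --- What gmmase does, roughly (a simplified sketch, not its implementation) ----
# Steps 3-5 redone with plain igraph + mclust: adjacency spectral embedding of the
# weighted directed graph, then Gaussian-mixture clustering of the embedding.
# The sketch skips pass-to-rank and the automatic elbow selection, fixing dhat = 2
# (the value chosen above), so the results will not match gmmase exactly.
library(mclust)
dhat <- 2
ase  <- embed_adjacency_matrix(g, no = dhat, weights = E(g)$weight)
Xhat <- cbind(ase$X, ase$Y)              # directed graph: join left/right embeddings
mc   <- Mclust(Xhat, G = 1:9, verbose = FALSE)
table(mc$classification)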
table(Y$Y) # cluster sizes recovered from the gmmase labels
## 
##   1   2   3   4   5   6   7 
##  72  59  88  81 123 143 434
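# --- Comparing clusters with the true blocks (sketch) ---------------------------
# Because the lcc above kept all 1000 vertices, Y$Y should line up with the
# original vertex order, so we can cross-tabulate the Khat = 7 clusters against
# the two simulated blocks and compute a chance-adjusted agreement score.
truth <- rep(1:2, times = c(300, 700))
table(cluster = Y$Y, block = truth)      # which clusters refine which block
mclust::adjustedRandIndex(Y$Y, truth)    # agreement with the truth, adjusted for chance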