The NetScience network dataset is provided as a GML file, containing 1589 nodes and 2742 edges.
# Start the timer
t1 <- system.time({
dataset_path <- system.file("extdata", "netscience.gml", package = "arlclustering")
if (dataset_path == "") {
stop("netscience.gml file not found")
}
g <- arlc_get_network_dataset(dataset_path, "NetScience")
g$graphLabel
g$totalNodes
g$totalEdges
g$averageDegree
})
# Display the total processing time
message("Graph loading Processing Time: ", t1["elapsed"], " seconds\n")
#> Graph loading Processing Time: 0.0390000000000015 seconds
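As a quick sanity check (this arithmetic is ours, not package output), the reported average degree should agree with 2 × |E| / |V| = 2 × 2742 / 1589 ≈ 3.45 for this undirected graph.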
Next, we generate transactions from the graph, obtaining a total of 1140 transactions.
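The transaction-generation step itself can be sketched as follows, assuming arlc_gen_transactions() is the package helper that builds transactions from the loaded graph (the function name and signature are an assumption here, not taken from the steps above).

# Start the timer
t2 <- system.time({
# Assumed helper: derive transactions from the loaded graph
transactions <- arlc_gen_transactions(g$graph)
length(transactions)
})
# Display the total processing time
message("Transactions generation Processing Time: ", t2["elapsed"], " seconds\n")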
We obtain the apriori thresholds for the generated transactions. The following are the thresholds for the apriori execution:

- Minimum Support: 0.011
- Minimum Confidence: 0.5
- Lift: 66.16071
- Gross Rules length: 875908
- Selection Ratio: 768
# Start the timer
t3 <- system.time({
params <- arlc_get_apriori_thresholds(transactions,
supportRange = seq(0.011, 0.012, by = 0.001),
Conf = 0.5)
params$minSupp
params$minConf
params$bestLift
params$lenRules
params$ratio
})
# Display the total processing time
message("Graph loading Processing Time: ", t3["elapsed"], " seconds\n")
#> Graph loading Processing Time: 2.17 seconds
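Note that the support grid above, seq(0.011, 0.012, by = 0.001), contains only the two candidate values 0.011 and 0.012; widening supportRange would let the threshold search explore more candidates at the cost of a longer run.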
We use the obtained parameters to generate the gross rules, yielding 875908 rules.
# Start the timer
t4 <- system.time({
minLenRules <- 1
maxLenRules <- params$lenRules
# Bound the maximum rule length (passed to apriori as maxlen) to five times
# the number of transactions when lenRules is non-finite or too large
if (!is.finite(maxLenRules) || maxLenRules > 5 * length(transactions)) {
maxLenRules <- 5 * length(transactions)
}
grossRules <- arlc_gen_gross_rules(transactions,
minSupp = params$minSupp,
minConf = params$minConf,
minLenRules = minLenRules+1,
maxLenRules = maxLenRules)
grossRules$TotalRulesWithLengthFilter
})
#> Apriori
#>
#> Parameter specification:
#> confidence minval smax arem aval originalSupport maxtime support minlen
#> 0.5 0.1 1 none FALSE TRUE 5 0.011 2
#> maxlen target ext
#> 5700 rules TRUE
#>
#> Algorithmic control:
#> filter tree heap memopt load sort verbose
#> 0.1 TRUE TRUE FALSE TRUE 2 TRUE
#>
#> Absolute minimum support count: 12
#>
#> set item appearances ...[0 item(s)] done [0.00s].
#> set transactions ...[1247 item(s), 1140 transaction(s)] done [0.00s].
#> sorting and recoding items ... [40 item(s)] done [0.00s].
#> creating transaction tree ... done [0.00s].
#> checking subsets of size 1 2 3 4 5 6 7 8 done [0.07s].
#> writing ... [875908 rule(s)] done [0.13s].
#> creating S4 object ... done [0.31s].
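The absolute minimum support count printed above is consistent with the chosen threshold: 0.011 × 1140 transactions ≈ 12.5, which apriori reports as 12.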
We filter out redundant rules from the generated gross rules. Next, we filter out non-significant rules from the non-redundant set, obtaining 388 rules.
t5 <- system.time({
NonRedRules <- arlc_get_NonR_rules(grossRules$GrossRules)
NonRSigRules <- arlc_get_significant_rules(transactions,
NonRedRules$FiltredRules)
NonRSigRules$TotFiltredRules
})
# Display the total processing time
message("\nClearing rules Processing Time: ", t5["elapsed"], " seconds\n")
#>
#> Clearing rules Processing Time: 2.647 seconds
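At this stage the retained rules can be examined directly. The following is a minimal sketch, assuming NonRSigRules$FiltredRules is a standard arules rules object (an assumption; the package may store the rules in another form):

library(arules)
# Show the five highest-lift rules among the non-redundant, significant rules
inspect(head(sort(NonRSigRules$FiltredRules, by = "lift"), 5))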
We clean the final set of rules to prepare for clustering. Then, we generate clusters based on the cleaned rules, identifying a total of 4 clusters.
t6 <- system.time({
cleanedRules <- arlc_clean_final_rules(NonRSigRules$FiltredRules)
clusters <- arlc_generate_clusters(cleanedRules)
clusters$TotClusters
})
# Display the total processing time
message("Cleaning and clustering Processing Time: ", t6["elapsed"], " seconds\n")
#> Cleaning and clustering Processing Time: 0.0250000000000057 seconds
Finally, we visualize the identified clusters.
arlc_clusters_plot(g$graph,
g$graphLabel,
clusters$Clusters)
#>
#> Total Identified Clusters: 4
#> =========================
#> Community 01:34 35 55
#> Community 02:646 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448
#> Community 03:914 917
#> Community 04:917 1202
#> =========================