```r
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
```
```r
library(arlclustering)
# library(igraph)
```
The Dolphins network dataset is provided as a GML file containing 62 nodes and 159 edges.
```r
# Start the timer
t1 <- system.time({
  dataset_path <- system.file("extdata", "dolphins.gml", package = "arlclustering")
  if (dataset_path == "") {
    stop("dolphins.gml file not found")
  }
  g <- arlc_get_network_dataset(dataset_path, "Dolphins")
  g$graphLabel
  g$totalEdges
  g$totalNodes
  g$averageDegree
})
# Display the total processing time
message("Graph loading processing time: ", t1["elapsed"], " seconds\n")
```
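As a quick sanity check, the reported counts can also be read directly from the GML file with igraph (a minimal sketch; it assumes igraph is installed, as hinted by the commented-out `library(igraph)` call in the setup chunk):

```r
# Load the same gml file with igraph and verify the basic statistics.
# This only cross-checks the numbers reported by arlc_get_network_dataset.
library(igraph)
g_check <- read_graph(dataset_path, format = "gml")
vcount(g_check)        # expected: 62 nodes
ecount(g_check)        # expected: 159 edges
mean(degree(g_check))  # average degree
```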
Next, we generate transactions from the graph, which yields a total of 53 rows.
```r
# Start the timer
t2 <- system.time({
  transactions <- arlc_gen_transactions(g$graph)
  transactions
})
# Display the total processing time
message("Transaction dataset processing time: ", t2["elapsed"], " seconds\n")
```
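The generated transactions can be examined with the arules tooling (a sketch; it assumes `arlc_gen_transactions` returns an arules `transactions` object, which the apriori-based steps below suggest):

```r
# Summarise and preview the transaction set (assumes an arules
# 'transactions' object, as suggested by the apriori steps below).
library(arules)
summary(transactions)           # item/transaction counts, density
inspect(head(transactions, 3))  # first few itemsets
```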
We obtain the apriori thresholds for the generated transactions. The following thresholds are used for the apriori execution:

- Minimum support: 0.05
- Minimum confidence: 0.5
- Lift: 13.25
- Gross rules length: 201
- Selection ratio: 4
```r
# Start the timer
t3 <- system.time({
  params <- arlc_get_apriori_thresholds(transactions,
                                        supportRange = seq(0.05, 0.07, by = 0.01),
                                        Conf = 0.5)
  params$minSupp
  params$minConf
  params$bestLift
  params$lenRules
  params$ratio
})
# Display the total processing time
message("Apriori thresholds processing time: ", t3["elapsed"], " seconds\n")
```
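For intuition, evaluating a single support value from the scanned range amounts to a plain `arules::apriori` call (a sketch, not the package's internal code):

```r
# Mine rules at one candidate support level and read off the quality
# measures that the threshold scan compares across the supportRange.
library(arules)
rules_try <- apriori(transactions,
                     parameter = list(support = 0.05, confidence = 0.5),
                     control = list(verbose = FALSE))
length(rules_try)            # rule count at this threshold
max(quality(rules_try)$lift) # best lift observed
```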
We use the obtained parameters to generate the gross rules, which yields 201 rules.
```r
# Start the timer
t4 <- system.time({
  minLenRules <- 1
  maxLenRules <- params$lenRules
  # Cap the maximum rule length to keep the search space bounded
  if (!is.finite(maxLenRules) || maxLenRules > 5 * length(transactions)) {
    maxLenRules <- 5 * length(transactions)
  }
  grossRules <- arlc_gen_gross_rules(transactions,
                                     minSupp = params$minSupp,
                                     minConf = params$minConf,
                                     minLenRules = minLenRules + 1,
                                     maxLenRules = maxLenRules)
  # grossRules$TotalRulesWithLengthFilter
})
# Display the total processing time
message("Gross rules generation time: ", t4["elapsed"], " seconds\n")
```
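A few of the mined rules can be previewed before filtering (a sketch; it assumes `grossRules$GrossRules` is an arules `rules` object, as its use in the filtering step below suggests):

```r
# Preview the first mined rules with their support/confidence/lift.
library(arules)
inspect(head(grossRules$GrossRules, 3))
```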
We filter out redundant rules from the generated gross rules, then filter out non-significant rules from the non-redundant ones, leaving 172 rules.
```r
# Start the timer
t5 <- system.time({
  NonRedRules <- arlc_get_NonR_rules(grossRules$GrossRules)
  NonRSigRules <- arlc_get_significant_rules(transactions, NonRedRules$FiltredRules)
  # NonRSigRules$TotFiltredRules
})
# Display the total processing time
message("Clearing rules processing time: ", t5["elapsed"], " seconds\n")
```
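Conceptually, these two filters correspond to arules' redundancy and significance tests (a sketch of comparable logic in plain arules, not the functions' actual implementation):

```r
# Drop redundant rules, then keep only statistically significant ones
# (Fisher's exact test is arules' default for is.significant).
library(arules)
nonRed <- grossRules$GrossRules[!is.redundant(grossRules$GrossRules)]
sig    <- nonRed[is.significant(nonRed, transactions)]
length(sig)  # should be in the vicinity of the 172 rules reported above
```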
We clean the final set of rules to prepare for clustering, then generate clusters from the cleaned rules. In total, 17 clusters are identified.
```r
# Start the timer
t6 <- system.time({
  cleanedRules <- arlc_clean_final_rules(NonRSigRules$FiltredRules)
  clusters <- arlc_generate_clusters(cleanedRules)
  # clusters$TotClusters
})
# Display the total processing time and the overall consumed time
message("Cleaning final rules processing time: ", t6["elapsed"], " seconds\n")
message("The total consumed time is: ",
        t1["elapsed"] + t2["elapsed"] + t3["elapsed"] +
        t4["elapsed"] + t5["elapsed"] + t6["elapsed"], " seconds\n")
```
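Before plotting, the cluster list can be summarised (a sketch; it assumes `clusters$Clusters` is a list of per-cluster node vectors, as its use in `arlc_clusters_plot` below suggests):

```r
# Count the clusters and report the size of each one.
length(clusters$Clusters)          # expected: 17 clusters
sapply(clusters$Clusters, length)  # number of nodes per cluster
```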
Finally, we visualize the identified clusters.
```r
arlc_clusters_plot(g$graph, g$graphLabel, clusters$Clusters)
```
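If a base-igraph rendering is preferred, cluster membership can be mapped onto vertex colours (a heavily hedged sketch: it assumes `clusters$Clusters` holds integer vertex ids and that the clusters are disjoint; an overlapping node would keep the last colour assigned):

```r
# Colour vertices by cluster id and draw the network with plain igraph.
library(igraph)
membership <- rep(NA_integer_, vcount(g$graph))
for (i in seq_along(clusters$Clusters)) {
  membership[clusters$Clusters[[i]]] <- i
}
plot(g$graph, vertex.color = membership, vertex.size = 8,
     vertex.label = NA, main = g$graphLabel)
```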