```r
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
```
```r
library(arlclustering)
# library(igraph)
```
The Karate Club social network dataset is provided as a GML file containing 34 nodes and 78 edges.
```r
# Start the timer
t1 <- system.time({
  dataset_path <- system.file("extdata", "karate.gml", package = "arlclustering")
  if (dataset_path == "") {
    stop("karate.gml file not found")
  }
  g <- arlc_get_network_dataset(dataset_path, "Karate Club")
  g$graphLabel
  g$totalNodes
  g$totalEdges
  g$averageDegree
})
# Display the total processing time
message("Graph loading Processing Time: ", t1["elapsed"], " seconds\n")
```
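As an optional sanity check, the same file can be read directly with igraph to confirm the node and edge counts. This is a sketch that assumes the igraph package is installed; the commented-out `library(igraph)` above suggests the package's graph objects are igraph-based.

```r
# Optional sanity check: read the GML file directly with igraph
# (assumes the igraph package is available)
library(igraph)
kc <- read_graph(dataset_path, format = "gml")
vcount(kc)  # expected: 34 nodes
ecount(kc)  # expected: 78 edges
```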
Next, we generate the transaction dataset from the graph `g`. The length of the filtered transactional dataset is 28.
```r
# Start the timer
t2 <- system.time({
  transactions <- arlc_gen_transactions(g$graph)
  transactions
})
# Display the total processing time
message("Transaction dataset Processing Time: ", t2["elapsed"], " seconds\n")
```
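To see what the generated transactions look like, they can be examined with the arules package. This sketch assumes `transactions` is a standard arules `transactions` object, which the Apriori-based steps below suggest but which should be checked against the package documentation.

```r
# Optional: inspect the transaction dataset
# (assumes `transactions` is an arules 'transactions' object)
library(arules)
length(transactions)        # expected: 28 filtered transactions
summary(transactions)       # size distribution and most frequent items
inspect(transactions[1:3])  # peek at the first few transactions
```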
We obtain the Apriori thresholds for the generated transactions. The following thresholds are used for the Apriori execution (the toy computation after the list shows how these measures relate):

- Minimum support: 0.1
- Minimum confidence: 0.5
- Lift: 7
- Gross rules length: 66
- Selection ratio: 2
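For reference, support, confidence, and lift carry their usual association-rule meanings. The plain-R computation below uses hypothetical counts, chosen only to illustrate how the three measures are derived from transaction counts.

```r
# Toy illustration of support, confidence and lift (hypothetical counts)
n    <- 28          # total number of transactions
n_A  <- 6           # transactions containing itemset A
n_B  <- 4           # transactions containing itemset B
n_AB <- 3           # transactions containing both A and B
supp <- n_AB / n    # support of the rule A => B
conf <- n_AB / n_A  # confidence of A => B
lift <- conf / (n_B / n)  # lift: confidence relative to B's base rate
c(support = supp, confidence = conf, lift = lift)
```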
```r
# Start the timer
t3 <- system.time({
  params <- arlc_get_apriori_thresholds(transactions,
                                        supportRange = seq(0.1, 0.2, by = 0.1),
                                        Conf = 0.5)
  params$minSupp
  params$minConf
  params$bestLift
  params$lenRules
  params$ratio
})
# Display the total processing time
message("Apriori thresholds Processing Time: ", t3["elapsed"], " seconds\n")
```
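The support range and confidence used above are one reasonable choice, not the only one. A wider grid can be explored with the same call signature, at the cost of a longer search; this is a sketch, and the extra values here are illustrative.

```r
# Optional: explore a wider support grid with a stricter confidence
# (same arlc_get_apriori_thresholds() signature as above)
altParams <- arlc_get_apriori_thresholds(transactions,
                                         supportRange = seq(0.05, 0.25, by = 0.05),
                                         Conf = 0.6)
altParams$minSupp
altParams$bestLift
```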
We use the obtained parameters to generate gross rules.
```r
# Start the timer
t4 <- system.time({
  minLenRules <- 1
  maxLenRules <- params$lenRules
  # Cap the maximum rule length to keep the search tractable
  if (!is.finite(maxLenRules) || maxLenRules > 5 * length(transactions)) {
    maxLenRules <- 5 * length(transactions)
  }
  grossRules <- arlc_gen_gross_rules(transactions,
                                     minSupp = params$minSupp,
                                     minConf = params$minConf,
                                     minLenRules = minLenRules + 1,
                                     maxLenRules = maxLenRules)
})
# Display the total processing time
message("Gross rules generation Time: ", t4["elapsed"], " seconds\n")
```
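For readers familiar with arules, this mining step is conceptually similar to a direct `apriori()` call. The sketch below rests on the assumption that `arlc_gen_gross_rules()` wraps `arules::apriori()`, which the parameter names suggest but which should be verified against the package source.

```r
# Conceptual analogue with arules::apriori()
# (an assumption: arlc_gen_gross_rules() appears to wrap a call like this)
library(arules)
rules <- apriori(transactions,
                 parameter = list(support    = params$minSupp,
                                  confidence = params$minConf,
                                  minlen     = 2,
                                  maxlen     = min(maxLenRules, 20),  # keep maxlen modest
                                  target     = "rules"))
length(rules)
```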
We filter out redundant rules from the generated gross rules. Next, we filter out non-significant rules from the non-redundant rules, obtaining 50 rules.
```r
t5 <- system.time({
  NonRedRules <- arlc_get_NonR_rules(grossRules$GrossRules)
  NonRSigRules <- arlc_get_significant_rules(transactions, NonRedRules$FiltredRules)
  NonRSigRules$TotFiltredRules
})
# Display the total number of retained rules and the total processing time
message("Rules filtering Processing Time: ", t5["elapsed"], " seconds\n")
```
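These two filtering steps mirror standard arules functionality. The sketch below shows the analogous plain-arules calls, reusing the `rules` object from the sketch above and assuming the rules are ordinary arules `rules` objects.

```r
# Analogous filtering with plain arules
# (uses the 'rules' object from the earlier sketch)
library(arules)
nonRedundant <- rules[!is.redundant(rules)]                           # drop redundant rules
significant  <- nonRedundant[is.significant(nonRedundant, transactions)]  # keep significant ones
length(significant)  # expected to be in the vicinity of the 50 rules reported above
```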
We clean the final set of rules to prepare for clustering. Then, we generate clusters based on the cleaned rules. This yields a total of 12 clusters.
```r
t6 <- system.time({
  cleanedRules <- arlc_clean_final_rules(NonRSigRules$FiltredRules)
  clusters <- arlc_generate_clusters(cleanedRules)
  clusters$TotClusters
})
# Display the total number of clusters and the total processing time
message("Cleaning final rules Processing Time: ", t6["elapsed"], " seconds\n")
message("The total consumed time is: ",
        t1["elapsed"] + t2["elapsed"] + t3["elapsed"] +
        t4["elapsed"] + t5["elapsed"] + t6["elapsed"], " seconds\n")
```
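Before plotting, the cluster assignments themselves can be examined. This sketch assumes `clusters$Clusters` is a list of node groups, an inference from the plotting call below rather than a documented fact.

```r
# Optional: inspect the cluster structure
# (assumes clusters$Clusters is a list of node groups)
str(clusters$Clusters, max.level = 1)
lengths(clusters$Clusters)  # size of each of the 12 clusters
```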
Finally, we visualize the identified clusters.
```r
arlc_clusters_plot(g$graph, g$graphLabel, clusters$Clusters)
```
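As an alternative view, igraph's own plotting can shade the groups directly on the network. This is a sketch under the same assumption as above, namely that `clusters$Clusters` is a list of vertex groups.

```r
# Alternative visualization with base igraph
# (assumes clusters$Clusters is a list of vertex groups)
library(igraph)
plot(g$graph,
     mark.groups = clusters$Clusters,  # shade each cluster
     vertex.size = 8,
     main = g$graphLabel)
```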