--- title: "ARLClustering - Testing Dolphins dataset" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{ARLClustering - Testing Dolphins dataset} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup} library(arlclustering) #library(igraph) ``` ## Dataset description The Dolphins network dataset is provided as a gml file, containing 62 nodes and 159 edges. ## Loading network dataset - Graph Label : Dolphins Network - Total Nodes : 62 - Total Edges : 159 - Average Degree : 5.129032 ```{r} # Start the timer t1 <- system.time({ dataset_path <- system.file("extdata", "dolphins.gml", package = "arlclustering") if (dataset_path == "") { stop("dolphins.gml file not found") } g <- arlc_get_network_dataset(dataset_path, "Dolphins") g$graphLabel g$totalEdges g$totalNodes g$averageDegree }) # Display the total processing time message("Graph loading Processing Time: ", t1["elapsed"], " seconds\n") ``` ## Generate Transactions Next, we generate transactions from the graph, with a total rows of 53. ```{r} # Start the timer t2 <- system.time({ transactions <- arlc_gen_transactions(g$graph) transactions }) # Display the total processing time message("Transaction dataset Processing Time: ", t2["elapsed"], " seconds\n") ``` ## Get Apriori Thresholds We obtain the apriori thresholds for the generated transactions. The following are the thresholds for the apriori execution: - The Minimum Support : 0.05 - The Minimum Confidence : 0.5 - The Lift : 13.25 - The Gross Rules length : 201 - The selection Ratio : 4 ```{r} # Start the timer t3 <- system.time({ params <- arlc_get_apriori_thresholds(transactions, supportRange = seq(0.05, 0.07, by = 0.01), Conf = 0.5) params$minSupp params$minConf params$bestLift params$lenRules params$ratio }) # Display the total processing time message("Graph loading Processing Time: ", t3["elapsed"], " seconds\n") ``` ## Generate Gross Rules We use the obtained parameters to generate gross rules, where we obtain 201 rules. ```{r} # Start the timer t4 <- system.time({ minLenRules <- 1 maxLenRules <- params$lenRules if (!is.finite(maxLenRules) || maxLenRules > 5*length(transactions)) { maxLenRules <- 5*length(transactions) } grossRules <- arlc_gen_gross_rules(transactions, minSupp = params$minSupp, minConf = params$minConf, minLenRules = minLenRules+1, maxLenRules = maxLenRules) #grossRules$TotalRulesWithLengthFilter }) # Display the total number of clusters and the total processing time message("Gross rules generation Time: ", t4["elapsed"], " seconds\n") ``` ## Filter Significant and Non-Redundant Rules We filter out redundant rules from the generated gross rules. Next, we filter out non-significant rules from the non-redundant rules, and we obtain the 172 rule items. ```{r} t5 <- system.time({ NonRedRules <- arlc_get_NonR_rules(grossRules$GrossRules) NonRSigRules <- arlc_get_significant_rules(transactions, NonRedRules$FiltredRules) #NonRSigRules$TotFiltredRules }) # Display the total number of clusters and the total processing time message("\nClearing rules Processing Time: ", t5["elapsed"], " seconds\n") ``` ## Clean and genarate final Rules We clean the final set of rules to prepare for clustering. Then, we generate clusters based on the cleaned rules. The total identified clusters is 17 clusters. 
## Clean and Generate Final Rules

We clean the final set of rules to prepare for clustering, then generate clusters based on the cleaned rules. A total of 17 clusters is identified.

```{r}
t6 <- system.time({
  cleanedRules <- arlc_clean_final_rules(NonRSigRules$FiltredRules)
  clusters <- arlc_generate_clusters(cleanedRules)
  #clusters$TotClusters
})

# Display the total processing time
message("Cleaning final rules processing time: ", t6["elapsed"], " seconds\n")
message("The total consumed time is: ",
        t1["elapsed"] + t2["elapsed"] + t3["elapsed"] +
        t4["elapsed"] + t5["elapsed"] + t6["elapsed"],
        " seconds\n")
```

## Plot Clusters

Finally, we visualize the identified clusters.

```{r}
arlc_clusters_plot(g$graph, g$graphLabel, clusters$Clusters)
```
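As a numeric complement to the plot, the cluster object can be summarized directly. This is a minimal sketch assuming `clusters$Clusters` is a list in which each element holds the node identifiers of one cluster (only the `TotClusters` and `Clusters` fields are confirmed by this vignette), so the chunk is not evaluated.

```{r, eval=FALSE}
# Assumption: clusters$Clusters is a list of node-membership vectors.
clusters$TotClusters                       # expected: 17 clusters

# Size distribution of the identified clusters
cluster_sizes <- lengths(clusters$Clusters)
summary(cluster_sizes)

# Share of the 62 dolphins covered by at least one cluster
length(unique(unlist(clusters$Clusters))) / g$totalNodes
```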