README.md

--- [!] Sabali Documentation [!] ---

--- Install Sabali ---

Update Packages

update.packages(checkBuilt = TRUE, ask = FALSE)

Install devtools Package

if("devtools" %in% rownames(installed.packages()) == FALSE) {
  install.packages("devtools", dependencies = TRUE)
}
library(devtools)

Download sabali Package

devtools::install_github("sabalicodev/sabali")
library(sabali)

Build Sabali

build.sabali()

Set Folder for Data | getwd() | setwd()

Mac = ~/ | PC = C:/

setwd("~/Desktop")

Create Data Folder

data.folder(getwd())

Set Folder for Data

working.folder <<- "~/Documents" # getwd()

--- Twitter Mining ---

Twitter API Authentication

app.name <- "App Name Here"
api.key <- "XXXXXXXXXXXXXXXXXXXX"
api.secret.key <- "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
access.token <- "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
access.token.secret <- "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

guide.twitter()

connect.twitter()

Twitter Query [#1]

nk.name <<- "Chickfila"

nk.tweets <- search_tweets( q = "chickfila OR chikfila", n = 1000, retryonratelimit = FALSE, type = "mixed", include_rts = FALSE, lang="en")

Sys.sleep(1)

Twitter Query [#2]

cbo.name <<- "Popeyes"

cbo.tweets <- search_tweets( q = "popeyes OR popeye", n = 1000, retryonratelimit = FALSE, type = "mixed", include_rts = FALSE, lang="en")

save.tweets()

clear()

Tweet Data from CSV

Read File 1

nk.tweets <- read.csv("~/Desktop/file1.csv")

Read File 2

cbo.tweets <- read.csv("~/Desktop/file2.csv")

--- Tweet Frequency ---

Set Global Frequency Time

frequency.time <<- "hours"

Set Global X-Axis Date Breaks

date.breaks <<- "12 hours"

Options: [secs, mins, hours, days, weeks, months, years]

Frequency Line Graph

tweet.freq.line(nk.tweets, nk.name, frequency.time, date.breaks)

Frequency Line Graph + Trend Line

tweet.freq.line(nk.tweets, nk.name, frequency.time, date.breaks, trend)

Combined Frequency Line Graph

tweet.freq.line(d = nk.tweets, dn = nk.name, f = frequency.time, b = date.breaks, c = cbo.tweets, cn = cbo.name)

Combined Frequency Line Graph + Trend Line

tweet.freq.line(nk.tweets, nk.name, frequency.time, date.breaks, trend, cbo.tweets, cbo.name)

Frequency Area Graph

tweet.freq.area(cbo.tweets, cbo.name, frequency.time, date.breaks)

Frequency Area Graph + Trend Line

tweet.freq.area(cbo.tweets, cbo.name, frequency.time, date.breaks, trend)

Combined Frequency Area Graph

tweet.freq.area(d = nk.tweets, dn = nk.name, f = frequency.time, b = date.breaks, c = cbo.tweets, cn = cbo.name)

Combined Frequency Area Graph + Trend Line

tweet.freq.area(nk.tweets, nk.name, frequency.time, date.breaks, trend, cbo.tweets, cbo.name)

--- Geolocation Maps ---

Basic Geolocation Map

geolocation.map(nk.tweets, nk.name)

geolocation.map(cbo.tweets, cbo.name)

Interactive HTML Geolocation Map

geolocation.map(nk.tweets, nk.name, HTML)

geolocation.map(cbo.tweets, cbo.name, HTML)

--- Create Corpus ---

List of Stopwords Languages

stopwords.languages()

Stopwords to Remove from Corpus

nk.stopwords.language <- "english"

Remove Words from Stopwords List

nk.remove.stopwords <- "c('word1')"

Remove Custom Words from Corpus (Lowercase)

nk.auto.remove <- c(paste0(tolower(c(nk.name))))
nk.remove.words <- c(paste0(nk.auto.remove), 'word1')

Build Corpus

create.corpus(d = nk.tweets, cw = nk.remove.words)
nk.corpus <- corpus.ultimus

Sys.sleep(1)

Stopwords to Remove from Corpus

cbo.stopwords.language <- "english"

Remove Words from Stopwords List

cbo.remove.stopwords <- "c('word1')"

Remove Custom Words from Corpus (Lowercase)

cbo.auto.remove <- c(paste0(tolower(c(cbo.name))))
cbo.remove.words <- c(paste0(cbo.auto.remove), 'word1')

Build Corpus

create.corpus(d = cbo.tweets, cw = cbo.remove.words)
cbo.corpus <- corpus.ultimus

--- Word Frequency ---

Word Frequency Histogram

word.frequency(nk.corpus, nk.name)

word.frequency(cbo.corpus, cbo.name)

--- Word Clouds ---

create.wordcloud(nk.corpus)

create.wordcloud(nk.corpus, HTML)

create.wordcloud(cbo.corpus)

create.wordcloud(cbo.corpus, HTML)

--- Word Associations ---

Word Association Searches

nk.word.search.1 <<- "trump"
nk.word.search.2 <<- "general"
nk.word.search.3 <<- "strike"

word.associations(nk.corpus, nk.name, "word1")
word.associations(nk.corpus, nk.name, nk.word.search.1)
word.associations(nk.corpus, nk.name, "word1", "word2")
word.associations(nk.corpus, nk.name, nk.word.search.1, nk.word.search.2, nk.word.search.3)

Word Association Searches

cbo.word.search.1 <<- "word1"
cbo.word.search.2 <<- "word2"
cbo.word.search.3 <<- "word3"

--- Dendrograms ---

dendrogram(nk.corpus, nk.name)

dendrogram(cbo.corpus, cbo.name)

--- Sentiment Analysis ---

sentiment.analysis(nk.tweets, nk.corpus)
nk.sentiment <- sentiment

Sys.sleep(1)

sentiment.analysis(cbo.tweets, cbo.corpus)
cbo.sentiment <- sentiment

--- Sentiment Plots ---

sentiment.plot(nk.sentiment, nk.name)

sentiment.plot(cbo.sentiment, cbo.name)

--- Sentiment Barcharts ---

sentiment.barchart(nk.sentiment, nk.name)

sentiment.barchart(cbo.sentiment, cbo.name)

sentiment.barchart(nk.sentiment, nk.name, cbo.sentiment, cbo.name)

--- Sentiment Time Series ---

sentiment.timeseries(nk.sentiment, nk.name)

sentiment.timeseries(cbo.sentiment, cbo.name)

sentiment.timeseries(nk.sentiment, nk.name, cbo.sentiment, cbo.name)

--- Sentiment Joyplots ---

sentiment.joyplot(nk.tweets, nk.name)

sentiment.joyplot(cbo.tweets, cbo.name)

--- Sentiment Word Frequency ---

sentiment.wordfreq(nk.tweets, nk.name)

sentiment.wordfreq(cbo.tweets, cbo.name)

--- Sentiment Statistics ---

Descriptive Statistics

summary(omit.zeros(nk.sentiment))

summary(omit.zeros(cbo.sentiment))

--- [!] Rapid Script End [!] ---



sabalicodev/sabali documentation built on Jan. 13, 2020, 2:22 p.m.